diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/data_store_service.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/data_store_service.rst new file mode 100644 index 000000000000..31bf2ead74b9 --- /dev/null +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/data_store_service.rst @@ -0,0 +1,10 @@ +DataStoreService +---------------------------------- + +.. automodule:: google.cloud.discoveryengine_v1.services.data_store_service + :members: + :inherited-members: + +.. automodule:: google.cloud.discoveryengine_v1.services.data_store_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/engine_service.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/engine_service.rst new file mode 100644 index 000000000000..79465089ab0f --- /dev/null +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/engine_service.rst @@ -0,0 +1,10 @@ +EngineService +------------------------------- + +.. automodule:: google.cloud.discoveryengine_v1.services.engine_service + :members: + :inherited-members: + +.. 
automodule:: google.cloud.discoveryengine_v1.services.engine_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/services_.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/services_.rst index f93a1c0cd465..d931db802583 100644 --- a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/services_.rst +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/services_.rst @@ -5,7 +5,10 @@ Services for Google Cloud Discoveryengine v1 API completion_service conversational_search_service + data_store_service document_service + engine_service schema_service search_service + site_search_engine_service user_event_service diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/site_search_engine_service.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/site_search_engine_service.rst new file mode 100644 index 000000000000..57ce1175b88a --- /dev/null +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/site_search_engine_service.rst @@ -0,0 +1,10 @@ +SiteSearchEngineService +----------------------------------------- + +.. automodule:: google.cloud.discoveryengine_v1.services.site_search_engine_service + :members: + :inherited-members: + +.. automodule:: google.cloud.discoveryengine_v1.services.site_search_engine_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/data_store_service.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/data_store_service.rst new file mode 100644 index 000000000000..0ac1e3726133 --- /dev/null +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/data_store_service.rst @@ -0,0 +1,10 @@ +DataStoreService +---------------------------------- + +.. automodule:: google.cloud.discoveryengine_v1beta.services.data_store_service + :members: + :inherited-members: + +.. 
automodule:: google.cloud.discoveryengine_v1beta.services.data_store_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/engine_service.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/engine_service.rst new file mode 100644 index 000000000000..30f1c1287328 --- /dev/null +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/engine_service.rst @@ -0,0 +1,10 @@ +EngineService +------------------------------- + +.. automodule:: google.cloud.discoveryengine_v1beta.services.engine_service + :members: + :inherited-members: + +.. automodule:: google.cloud.discoveryengine_v1beta.services.engine_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/services_.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/services_.rst index ad346c2c29fe..82e3a9a0da4f 100644 --- a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/services_.rst +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/services_.rst @@ -5,8 +5,12 @@ Services for Google Cloud Discoveryengine v1beta API completion_service conversational_search_service + data_store_service document_service + engine_service recommendation_service schema_service search_service + serving_config_service + site_search_engine_service user_event_service diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/serving_config_service.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/serving_config_service.rst new file mode 100644 index 000000000000..204224ebc133 --- /dev/null +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/serving_config_service.rst @@ -0,0 +1,10 @@ +ServingConfigService +-------------------------------------- + +.. 
automodule:: google.cloud.discoveryengine_v1beta.services.serving_config_service + :members: + :inherited-members: + +.. automodule:: google.cloud.discoveryengine_v1beta.services.serving_config_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/site_search_engine_service.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/site_search_engine_service.rst new file mode 100644 index 000000000000..d8b5c80d226d --- /dev/null +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1beta/site_search_engine_service.rst @@ -0,0 +1,10 @@ +SiteSearchEngineService +----------------------------------------- + +.. automodule:: google.cloud.discoveryengine_v1beta.services.site_search_engine_service + :members: + :inherited-members: + +.. automodule:: google.cloud.discoveryengine_v1beta.services.site_search_engine_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/__init__.py index d9be8548a92e..4ad623f8ddf5 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/__init__.py @@ -30,12 +30,24 @@ from google.cloud.discoveryengine_v1beta.services.conversational_search_service.client import ( ConversationalSearchServiceClient, ) +from google.cloud.discoveryengine_v1beta.services.data_store_service.async_client import ( + DataStoreServiceAsyncClient, +) +from google.cloud.discoveryengine_v1beta.services.data_store_service.client import ( + DataStoreServiceClient, +) from google.cloud.discoveryengine_v1beta.services.document_service.async_client import ( DocumentServiceAsyncClient, ) from google.cloud.discoveryengine_v1beta.services.document_service.client import ( DocumentServiceClient, ) +from 
google.cloud.discoveryengine_v1beta.services.engine_service.async_client import ( + EngineServiceAsyncClient, +) +from google.cloud.discoveryengine_v1beta.services.engine_service.client import ( + EngineServiceClient, +) from google.cloud.discoveryengine_v1beta.services.recommendation_service.async_client import ( RecommendationServiceAsyncClient, ) @@ -54,6 +66,18 @@ from google.cloud.discoveryengine_v1beta.services.search_service.client import ( SearchServiceClient, ) +from google.cloud.discoveryengine_v1beta.services.serving_config_service.async_client import ( + ServingConfigServiceAsyncClient, +) +from google.cloud.discoveryengine_v1beta.services.serving_config_service.client import ( + ServingConfigServiceClient, +) +from google.cloud.discoveryengine_v1beta.services.site_search_engine_service.async_client import ( + SiteSearchEngineServiceAsyncClient, +) +from google.cloud.discoveryengine_v1beta.services.site_search_engine_service.client import ( + SiteSearchEngineServiceClient, +) from google.cloud.discoveryengine_v1beta.services.user_event_service.async_client import ( UserEventServiceAsyncClient, ) @@ -63,9 +87,15 @@ from google.cloud.discoveryengine_v1beta.types.common import ( CustomAttribute, DoubleList, + EmbeddingConfig, + IndustryVertical, Interval, + SearchAddOn, + SearchTier, + SolutionType, UserInfo, ) +from google.cloud.discoveryengine_v1beta.types.completion import SuggestionDenyListEntry from google.cloud.discoveryengine_v1beta.types.completion_service import ( CompleteQueryRequest, CompleteQueryResponse, @@ -87,6 +117,17 @@ ListConversationsResponse, UpdateConversationRequest, ) +from google.cloud.discoveryengine_v1beta.types.data_store import DataStore +from google.cloud.discoveryengine_v1beta.types.data_store_service import ( + CreateDataStoreMetadata, + CreateDataStoreRequest, + DeleteDataStoreMetadata, + DeleteDataStoreRequest, + GetDataStoreRequest, + ListDataStoresRequest, + ListDataStoresResponse, + UpdateDataStoreRequest, +) from 
google.cloud.discoveryengine_v1beta.types.document import Document from google.cloud.discoveryengine_v1beta.types.document_service import ( CreateDocumentRequest, @@ -96,6 +137,17 @@ ListDocumentsResponse, UpdateDocumentRequest, ) +from google.cloud.discoveryengine_v1beta.types.engine import Engine +from google.cloud.discoveryengine_v1beta.types.engine_service import ( + CreateEngineMetadata, + CreateEngineRequest, + DeleteEngineMetadata, + DeleteEngineRequest, + GetEngineRequest, + ListEnginesRequest, + ListEnginesResponse, + UpdateEngineRequest, +) from google.cloud.discoveryengine_v1beta.types.import_config import ( BigQuerySource, GcsSource, @@ -103,6 +155,9 @@ ImportDocumentsRequest, ImportDocumentsResponse, ImportErrorConfig, + ImportSuggestionDenyListEntriesMetadata, + ImportSuggestionDenyListEntriesRequest, + ImportSuggestionDenyListEntriesResponse, ImportUserEventsMetadata, ImportUserEventsRequest, ImportUserEventsResponse, @@ -111,6 +166,9 @@ PurgeDocumentsMetadata, PurgeDocumentsRequest, PurgeDocumentsResponse, + PurgeSuggestionDenyListEntriesMetadata, + PurgeSuggestionDenyListEntriesRequest, + PurgeSuggestionDenyListEntriesResponse, ) from google.cloud.discoveryengine_v1beta.types.recommendation_service import ( RecommendRequest, @@ -132,6 +190,47 @@ SearchRequest, SearchResponse, ) +from google.cloud.discoveryengine_v1beta.types.serving_config import ServingConfig +from google.cloud.discoveryengine_v1beta.types.serving_config_service import ( + GetServingConfigRequest, + ListServingConfigsRequest, + ListServingConfigsResponse, + UpdateServingConfigRequest, +) +from google.cloud.discoveryengine_v1beta.types.site_search_engine import ( + SiteSearchEngine, + SiteVerificationInfo, + TargetSite, +) +from google.cloud.discoveryengine_v1beta.types.site_search_engine_service import ( + BatchCreateTargetSiteMetadata, + BatchCreateTargetSitesRequest, + BatchCreateTargetSitesResponse, + BatchVerifyTargetSitesMetadata, + BatchVerifyTargetSitesRequest, + 
BatchVerifyTargetSitesResponse, + CreateTargetSiteMetadata, + CreateTargetSiteRequest, + DeleteTargetSiteMetadata, + DeleteTargetSiteRequest, + DisableAdvancedSiteSearchMetadata, + DisableAdvancedSiteSearchRequest, + DisableAdvancedSiteSearchResponse, + EnableAdvancedSiteSearchMetadata, + EnableAdvancedSiteSearchRequest, + EnableAdvancedSiteSearchResponse, + FetchDomainVerificationStatusRequest, + FetchDomainVerificationStatusResponse, + GetSiteSearchEngineRequest, + GetTargetSiteRequest, + ListTargetSitesRequest, + ListTargetSitesResponse, + RecrawlUrisMetadata, + RecrawlUrisRequest, + RecrawlUrisResponse, + UpdateTargetSiteMetadata, + UpdateTargetSiteRequest, +) from google.cloud.discoveryengine_v1beta.types.user_event import ( CompletionInfo, DocumentInfo, @@ -152,20 +251,34 @@ "CompletionServiceAsyncClient", "ConversationalSearchServiceClient", "ConversationalSearchServiceAsyncClient", + "DataStoreServiceClient", + "DataStoreServiceAsyncClient", "DocumentServiceClient", "DocumentServiceAsyncClient", + "EngineServiceClient", + "EngineServiceAsyncClient", "RecommendationServiceClient", "RecommendationServiceAsyncClient", "SchemaServiceClient", "SchemaServiceAsyncClient", "SearchServiceClient", "SearchServiceAsyncClient", + "ServingConfigServiceClient", + "ServingConfigServiceAsyncClient", + "SiteSearchEngineServiceClient", + "SiteSearchEngineServiceAsyncClient", "UserEventServiceClient", "UserEventServiceAsyncClient", "CustomAttribute", "DoubleList", + "EmbeddingConfig", "Interval", "UserInfo", + "IndustryVertical", + "SearchAddOn", + "SearchTier", + "SolutionType", + "SuggestionDenyListEntry", "CompleteQueryRequest", "CompleteQueryResponse", "Conversation", @@ -181,6 +294,15 @@ "ListConversationsRequest", "ListConversationsResponse", "UpdateConversationRequest", + "DataStore", + "CreateDataStoreMetadata", + "CreateDataStoreRequest", + "DeleteDataStoreMetadata", + "DeleteDataStoreRequest", + "GetDataStoreRequest", + "ListDataStoresRequest", + 
"ListDataStoresResponse", + "UpdateDataStoreRequest", "Document", "CreateDocumentRequest", "DeleteDocumentRequest", @@ -188,18 +310,33 @@ "ListDocumentsRequest", "ListDocumentsResponse", "UpdateDocumentRequest", + "Engine", + "CreateEngineMetadata", + "CreateEngineRequest", + "DeleteEngineMetadata", + "DeleteEngineRequest", + "GetEngineRequest", + "ListEnginesRequest", + "ListEnginesResponse", + "UpdateEngineRequest", "BigQuerySource", "GcsSource", "ImportDocumentsMetadata", "ImportDocumentsRequest", "ImportDocumentsResponse", "ImportErrorConfig", + "ImportSuggestionDenyListEntriesMetadata", + "ImportSuggestionDenyListEntriesRequest", + "ImportSuggestionDenyListEntriesResponse", "ImportUserEventsMetadata", "ImportUserEventsRequest", "ImportUserEventsResponse", "PurgeDocumentsMetadata", "PurgeDocumentsRequest", "PurgeDocumentsResponse", + "PurgeSuggestionDenyListEntriesMetadata", + "PurgeSuggestionDenyListEntriesRequest", + "PurgeSuggestionDenyListEntriesResponse", "RecommendRequest", "RecommendResponse", "Schema", @@ -214,6 +351,41 @@ "UpdateSchemaRequest", "SearchRequest", "SearchResponse", + "ServingConfig", + "GetServingConfigRequest", + "ListServingConfigsRequest", + "ListServingConfigsResponse", + "UpdateServingConfigRequest", + "SiteSearchEngine", + "SiteVerificationInfo", + "TargetSite", + "BatchCreateTargetSiteMetadata", + "BatchCreateTargetSitesRequest", + "BatchCreateTargetSitesResponse", + "BatchVerifyTargetSitesMetadata", + "BatchVerifyTargetSitesRequest", + "BatchVerifyTargetSitesResponse", + "CreateTargetSiteMetadata", + "CreateTargetSiteRequest", + "DeleteTargetSiteMetadata", + "DeleteTargetSiteRequest", + "DisableAdvancedSiteSearchMetadata", + "DisableAdvancedSiteSearchRequest", + "DisableAdvancedSiteSearchResponse", + "EnableAdvancedSiteSearchMetadata", + "EnableAdvancedSiteSearchRequest", + "EnableAdvancedSiteSearchResponse", + "FetchDomainVerificationStatusRequest", + "FetchDomainVerificationStatusResponse", + "GetSiteSearchEngineRequest", + 
"GetTargetSiteRequest", + "ListTargetSitesRequest", + "ListTargetSitesResponse", + "RecrawlUrisMetadata", + "RecrawlUrisRequest", + "RecrawlUrisResponse", + "UpdateTargetSiteMetadata", + "UpdateTargetSiteRequest", "CompletionInfo", "DocumentInfo", "MediaInfo", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/gapic_version.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/gapic_version.py index fb9a6cb2d900..360a0d13ebdd 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/gapic_version.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.11.7" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/__init__.py index a94d7414d47a..15a7b25ac5c0 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/__init__.py @@ -26,14 +26,32 @@ ConversationalSearchServiceAsyncClient, ConversationalSearchServiceClient, ) +from .services.data_store_service import ( + DataStoreServiceAsyncClient, + DataStoreServiceClient, +) from .services.document_service import DocumentServiceAsyncClient, DocumentServiceClient +from .services.engine_service import EngineServiceAsyncClient, EngineServiceClient from .services.schema_service import SchemaServiceAsyncClient, SchemaServiceClient from .services.search_service import SearchServiceAsyncClient, SearchServiceClient +from .services.site_search_engine_service import ( + SiteSearchEngineServiceAsyncClient, + SiteSearchEngineServiceClient, +) from .services.user_event_service import ( 
UserEventServiceAsyncClient, UserEventServiceClient, ) -from .types.common import CustomAttribute, Interval, UserInfo +from .types.common import ( + CustomAttribute, + IndustryVertical, + Interval, + SearchAddOn, + SearchTier, + SolutionType, + UserInfo, +) +from .types.completion import SuggestionDenyListEntry from .types.completion_service import CompleteQueryRequest, CompleteQueryResponse from .types.conversation import ( Conversation, @@ -52,6 +70,17 @@ ListConversationsResponse, UpdateConversationRequest, ) +from .types.data_store import DataStore +from .types.data_store_service import ( + CreateDataStoreMetadata, + CreateDataStoreRequest, + DeleteDataStoreMetadata, + DeleteDataStoreRequest, + GetDataStoreRequest, + ListDataStoresRequest, + ListDataStoresResponse, + UpdateDataStoreRequest, +) from .types.document import Document from .types.document_service import ( CreateDocumentRequest, @@ -61,6 +90,17 @@ ListDocumentsResponse, UpdateDocumentRequest, ) +from .types.engine import Engine +from .types.engine_service import ( + CreateEngineMetadata, + CreateEngineRequest, + DeleteEngineMetadata, + DeleteEngineRequest, + GetEngineRequest, + ListEnginesRequest, + ListEnginesResponse, + UpdateEngineRequest, +) from .types.import_config import ( BigQuerySource, GcsSource, @@ -68,6 +108,9 @@ ImportDocumentsRequest, ImportDocumentsResponse, ImportErrorConfig, + ImportSuggestionDenyListEntriesMetadata, + ImportSuggestionDenyListEntriesRequest, + ImportSuggestionDenyListEntriesResponse, ImportUserEventsMetadata, ImportUserEventsRequest, ImportUserEventsResponse, @@ -76,6 +119,9 @@ PurgeDocumentsMetadata, PurgeDocumentsRequest, PurgeDocumentsResponse, + PurgeSuggestionDenyListEntriesMetadata, + PurgeSuggestionDenyListEntriesRequest, + PurgeSuggestionDenyListEntriesResponse, ) from .types.schema import Schema from .types.schema_service import ( @@ -90,6 +136,36 @@ UpdateSchemaRequest, ) from .types.search_service import SearchRequest, SearchResponse +from 
.types.site_search_engine import SiteSearchEngine, SiteVerificationInfo, TargetSite +from .types.site_search_engine_service import ( + BatchCreateTargetSiteMetadata, + BatchCreateTargetSitesRequest, + BatchCreateTargetSitesResponse, + BatchVerifyTargetSitesMetadata, + BatchVerifyTargetSitesRequest, + BatchVerifyTargetSitesResponse, + CreateTargetSiteMetadata, + CreateTargetSiteRequest, + DeleteTargetSiteMetadata, + DeleteTargetSiteRequest, + DisableAdvancedSiteSearchMetadata, + DisableAdvancedSiteSearchRequest, + DisableAdvancedSiteSearchResponse, + EnableAdvancedSiteSearchMetadata, + EnableAdvancedSiteSearchRequest, + EnableAdvancedSiteSearchResponse, + FetchDomainVerificationStatusRequest, + FetchDomainVerificationStatusResponse, + GetSiteSearchEngineRequest, + GetTargetSiteRequest, + ListTargetSitesRequest, + ListTargetSitesResponse, + RecrawlUrisMetadata, + RecrawlUrisRequest, + RecrawlUrisResponse, + UpdateTargetSiteMetadata, + UpdateTargetSiteRequest, +) from .types.user_event import ( CompletionInfo, DocumentInfo, @@ -105,10 +181,19 @@ __all__ = ( "CompletionServiceAsyncClient", "ConversationalSearchServiceAsyncClient", + "DataStoreServiceAsyncClient", "DocumentServiceAsyncClient", + "EngineServiceAsyncClient", "SchemaServiceAsyncClient", "SearchServiceAsyncClient", + "SiteSearchEngineServiceAsyncClient", "UserEventServiceAsyncClient", + "BatchCreateTargetSiteMetadata", + "BatchCreateTargetSitesRequest", + "BatchCreateTargetSitesResponse", + "BatchVerifyTargetSitesMetadata", + "BatchVerifyTargetSitesRequest", + "BatchVerifyTargetSitesResponse", "BigQuerySource", "CollectUserEventRequest", "CompleteQueryRequest", @@ -122,54 +207,110 @@ "ConverseConversationRequest", "ConverseConversationResponse", "CreateConversationRequest", + "CreateDataStoreMetadata", + "CreateDataStoreRequest", "CreateDocumentRequest", + "CreateEngineMetadata", + "CreateEngineRequest", "CreateSchemaMetadata", "CreateSchemaRequest", + "CreateTargetSiteMetadata", + 
"CreateTargetSiteRequest", "CustomAttribute", + "DataStore", + "DataStoreServiceClient", "DeleteConversationRequest", + "DeleteDataStoreMetadata", + "DeleteDataStoreRequest", "DeleteDocumentRequest", + "DeleteEngineMetadata", + "DeleteEngineRequest", "DeleteSchemaMetadata", "DeleteSchemaRequest", + "DeleteTargetSiteMetadata", + "DeleteTargetSiteRequest", + "DisableAdvancedSiteSearchMetadata", + "DisableAdvancedSiteSearchRequest", + "DisableAdvancedSiteSearchResponse", "Document", "DocumentInfo", "DocumentServiceClient", + "EnableAdvancedSiteSearchMetadata", + "EnableAdvancedSiteSearchRequest", + "EnableAdvancedSiteSearchResponse", + "Engine", + "EngineServiceClient", + "FetchDomainVerificationStatusRequest", + "FetchDomainVerificationStatusResponse", "GcsSource", "GetConversationRequest", + "GetDataStoreRequest", "GetDocumentRequest", + "GetEngineRequest", "GetSchemaRequest", + "GetSiteSearchEngineRequest", + "GetTargetSiteRequest", "ImportDocumentsMetadata", "ImportDocumentsRequest", "ImportDocumentsResponse", "ImportErrorConfig", + "ImportSuggestionDenyListEntriesMetadata", + "ImportSuggestionDenyListEntriesRequest", + "ImportSuggestionDenyListEntriesResponse", "ImportUserEventsMetadata", "ImportUserEventsRequest", "ImportUserEventsResponse", + "IndustryVertical", "Interval", "ListConversationsRequest", "ListConversationsResponse", + "ListDataStoresRequest", + "ListDataStoresResponse", "ListDocumentsRequest", "ListDocumentsResponse", + "ListEnginesRequest", + "ListEnginesResponse", "ListSchemasRequest", "ListSchemasResponse", + "ListTargetSitesRequest", + "ListTargetSitesResponse", "MediaInfo", "PageInfo", "PanelInfo", "PurgeDocumentsMetadata", "PurgeDocumentsRequest", "PurgeDocumentsResponse", + "PurgeSuggestionDenyListEntriesMetadata", + "PurgeSuggestionDenyListEntriesRequest", + "PurgeSuggestionDenyListEntriesResponse", + "RecrawlUrisMetadata", + "RecrawlUrisRequest", + "RecrawlUrisResponse", "Reply", "Schema", "SchemaServiceClient", + "SearchAddOn", 
"SearchInfo", "SearchRequest", "SearchResponse", "SearchServiceClient", + "SearchTier", + "SiteSearchEngine", + "SiteSearchEngineServiceClient", + "SiteVerificationInfo", + "SolutionType", + "SuggestionDenyListEntry", + "TargetSite", "TextInput", "TransactionInfo", "UpdateConversationRequest", + "UpdateDataStoreRequest", "UpdateDocumentRequest", + "UpdateEngineRequest", "UpdateSchemaMetadata", "UpdateSchemaRequest", + "UpdateTargetSiteMetadata", + "UpdateTargetSiteRequest", "UserEvent", "UserEventServiceClient", "UserInfo", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_metadata.json b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_metadata.json index 40ef7a0de6c2..1bf52b2c5e3f 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_metadata.json +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_metadata.json @@ -14,6 +14,16 @@ "methods": [ "complete_query" ] + }, + "ImportSuggestionDenyListEntries": { + "methods": [ + "import_suggestion_deny_list_entries" + ] + }, + "PurgeSuggestionDenyListEntries": { + "methods": [ + "purge_suggestion_deny_list_entries" + ] } } }, @@ -24,6 +34,16 @@ "methods": [ "complete_query" ] + }, + "ImportSuggestionDenyListEntries": { + "methods": [ + "import_suggestion_deny_list_entries" + ] + }, + "PurgeSuggestionDenyListEntries": { + "methods": [ + "purge_suggestion_deny_list_entries" + ] } } }, @@ -34,6 +54,16 @@ "methods": [ "complete_query" ] + }, + "ImportSuggestionDenyListEntries": { + "methods": [ + "import_suggestion_deny_list_entries" + ] + }, + "PurgeSuggestionDenyListEntries": { + "methods": [ + "purge_suggestion_deny_list_entries" + ] } } } @@ -148,6 +178,100 @@ } } }, + "DataStoreService": { + "clients": { + "grpc": { + "libraryClient": "DataStoreServiceClient", + "rpcs": { + "CreateDataStore": { + "methods": [ + "create_data_store" + ] + }, + "DeleteDataStore": { + "methods": [ + 
"delete_data_store" + ] + }, + "GetDataStore": { + "methods": [ + "get_data_store" + ] + }, + "ListDataStores": { + "methods": [ + "list_data_stores" + ] + }, + "UpdateDataStore": { + "methods": [ + "update_data_store" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DataStoreServiceAsyncClient", + "rpcs": { + "CreateDataStore": { + "methods": [ + "create_data_store" + ] + }, + "DeleteDataStore": { + "methods": [ + "delete_data_store" + ] + }, + "GetDataStore": { + "methods": [ + "get_data_store" + ] + }, + "ListDataStores": { + "methods": [ + "list_data_stores" + ] + }, + "UpdateDataStore": { + "methods": [ + "update_data_store" + ] + } + } + }, + "rest": { + "libraryClient": "DataStoreServiceClient", + "rpcs": { + "CreateDataStore": { + "methods": [ + "create_data_store" + ] + }, + "DeleteDataStore": { + "methods": [ + "delete_data_store" + ] + }, + "GetDataStore": { + "methods": [ + "get_data_store" + ] + }, + "ListDataStores": { + "methods": [ + "list_data_stores" + ] + }, + "UpdateDataStore": { + "methods": [ + "update_data_store" + ] + } + } + } + } + }, "DocumentService": { "clients": { "grpc": { @@ -272,6 +396,100 @@ } } }, + "EngineService": { + "clients": { + "grpc": { + "libraryClient": "EngineServiceClient", + "rpcs": { + "CreateEngine": { + "methods": [ + "create_engine" + ] + }, + "DeleteEngine": { + "methods": [ + "delete_engine" + ] + }, + "GetEngine": { + "methods": [ + "get_engine" + ] + }, + "ListEngines": { + "methods": [ + "list_engines" + ] + }, + "UpdateEngine": { + "methods": [ + "update_engine" + ] + } + } + }, + "grpc-async": { + "libraryClient": "EngineServiceAsyncClient", + "rpcs": { + "CreateEngine": { + "methods": [ + "create_engine" + ] + }, + "DeleteEngine": { + "methods": [ + "delete_engine" + ] + }, + "GetEngine": { + "methods": [ + "get_engine" + ] + }, + "ListEngines": { + "methods": [ + "list_engines" + ] + }, + "UpdateEngine": { + "methods": [ + "update_engine" + ] + } + } + }, + "rest": { + "libraryClient": 
"EngineServiceClient", + "rpcs": { + "CreateEngine": { + "methods": [ + "create_engine" + ] + }, + "DeleteEngine": { + "methods": [ + "delete_engine" + ] + }, + "GetEngine": { + "methods": [ + "get_engine" + ] + }, + "ListEngines": { + "methods": [ + "list_engines" + ] + }, + "UpdateEngine": { + "methods": [ + "update_engine" + ] + } + } + } + } + }, "SchemaService": { "clients": { "grpc": { @@ -400,6 +618,205 @@ } } }, + "SiteSearchEngineService": { + "clients": { + "grpc": { + "libraryClient": "SiteSearchEngineServiceClient", + "rpcs": { + "BatchCreateTargetSites": { + "methods": [ + "batch_create_target_sites" + ] + }, + "BatchVerifyTargetSites": { + "methods": [ + "batch_verify_target_sites" + ] + }, + "CreateTargetSite": { + "methods": [ + "create_target_site" + ] + }, + "DeleteTargetSite": { + "methods": [ + "delete_target_site" + ] + }, + "DisableAdvancedSiteSearch": { + "methods": [ + "disable_advanced_site_search" + ] + }, + "EnableAdvancedSiteSearch": { + "methods": [ + "enable_advanced_site_search" + ] + }, + "FetchDomainVerificationStatus": { + "methods": [ + "fetch_domain_verification_status" + ] + }, + "GetSiteSearchEngine": { + "methods": [ + "get_site_search_engine" + ] + }, + "GetTargetSite": { + "methods": [ + "get_target_site" + ] + }, + "ListTargetSites": { + "methods": [ + "list_target_sites" + ] + }, + "RecrawlUris": { + "methods": [ + "recrawl_uris" + ] + }, + "UpdateTargetSite": { + "methods": [ + "update_target_site" + ] + } + } + }, + "grpc-async": { + "libraryClient": "SiteSearchEngineServiceAsyncClient", + "rpcs": { + "BatchCreateTargetSites": { + "methods": [ + "batch_create_target_sites" + ] + }, + "BatchVerifyTargetSites": { + "methods": [ + "batch_verify_target_sites" + ] + }, + "CreateTargetSite": { + "methods": [ + "create_target_site" + ] + }, + "DeleteTargetSite": { + "methods": [ + "delete_target_site" + ] + }, + "DisableAdvancedSiteSearch": { + "methods": [ + "disable_advanced_site_search" + ] + }, + "EnableAdvancedSiteSearch": 
{ + "methods": [ + "enable_advanced_site_search" + ] + }, + "FetchDomainVerificationStatus": { + "methods": [ + "fetch_domain_verification_status" + ] + }, + "GetSiteSearchEngine": { + "methods": [ + "get_site_search_engine" + ] + }, + "GetTargetSite": { + "methods": [ + "get_target_site" + ] + }, + "ListTargetSites": { + "methods": [ + "list_target_sites" + ] + }, + "RecrawlUris": { + "methods": [ + "recrawl_uris" + ] + }, + "UpdateTargetSite": { + "methods": [ + "update_target_site" + ] + } + } + }, + "rest": { + "libraryClient": "SiteSearchEngineServiceClient", + "rpcs": { + "BatchCreateTargetSites": { + "methods": [ + "batch_create_target_sites" + ] + }, + "BatchVerifyTargetSites": { + "methods": [ + "batch_verify_target_sites" + ] + }, + "CreateTargetSite": { + "methods": [ + "create_target_site" + ] + }, + "DeleteTargetSite": { + "methods": [ + "delete_target_site" + ] + }, + "DisableAdvancedSiteSearch": { + "methods": [ + "disable_advanced_site_search" + ] + }, + "EnableAdvancedSiteSearch": { + "methods": [ + "enable_advanced_site_search" + ] + }, + "FetchDomainVerificationStatus": { + "methods": [ + "fetch_domain_verification_status" + ] + }, + "GetSiteSearchEngine": { + "methods": [ + "get_site_search_engine" + ] + }, + "GetTargetSite": { + "methods": [ + "get_target_site" + ] + }, + "ListTargetSites": { + "methods": [ + "list_target_sites" + ] + }, + "RecrawlUris": { + "methods": [ + "recrawl_uris" + ] + }, + "UpdateTargetSite": { + "methods": [ + "update_target_site" + ] + } + } + } + } + }, "UserEventService": { "clients": { "grpc": { diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_version.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_version.py index fb9a6cb2d900..360a0d13ebdd 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_version.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_version.py @@ -13,4 
+13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.11.7" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/async_client.py index c814601565f9..541319513998 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/async_client.py @@ -42,10 +42,16 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore from google.cloud.location import locations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore -from google.cloud.discoveryengine_v1.types import completion_service +from google.cloud.discoveryengine_v1.types import ( + completion_service, + import_config, + purge_config, +) from .client import CompletionServiceClient from .transports.base import DEFAULT_CLIENT_INFO, CompletionServiceTransport @@ -343,6 +349,217 @@ async def sample_complete_query(): # Done; return the response. return response + async def import_suggestion_deny_list_entries( + self, + request: Optional[ + Union[import_config.ImportSuggestionDenyListEntriesRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Imports all + [SuggestionDenyListEntry][google.cloud.discoveryengine.v1.SuggestionDenyListEntry] + for a DataStore. 
+ + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_import_suggestion_deny_list_entries(): + # Create a client + client = discoveryengine_v1.CompletionServiceAsyncClient() + + # Initialize request argument(s) + inline_source = discoveryengine_v1.InlineSource() + inline_source.entries.block_phrase = "block_phrase_value" + inline_source.entries.match_operator = "CONTAINS" + + request = discoveryengine_v1.ImportSuggestionDenyListEntriesRequest( + inline_source=inline_source, + parent="parent_value", + ) + + # Make the request + operation = client.import_suggestion_deny_list_entries(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.ImportSuggestionDenyListEntriesRequest, dict]]): + The request object. Request message for + [CompletionService.ImportSuggestionDenyListEntries][google.cloud.discoveryengine.v1.CompletionService.ImportSuggestionDenyListEntries] + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.discoveryengine_v1.types.ImportSuggestionDenyListEntriesResponse` Response message for + [CompletionService.ImportSuggestionDenyListEntries][google.cloud.discoveryengine.v1.CompletionService.ImportSuggestionDenyListEntries] + method. + + """ + # Create or coerce a protobuf request object. + request = import_config.ImportSuggestionDenyListEntriesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.import_suggestion_deny_list_entries, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + import_config.ImportSuggestionDenyListEntriesResponse, + metadata_type=import_config.ImportSuggestionDenyListEntriesMetadata, + ) + + # Done; return the response. + return response + + async def purge_suggestion_deny_list_entries( + self, + request: Optional[ + Union[purge_config.PurgeSuggestionDenyListEntriesRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Permanently deletes all + [SuggestionDenyListEntry][google.cloud.discoveryengine.v1.SuggestionDenyListEntry] + for a DataStore. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_purge_suggestion_deny_list_entries(): + # Create a client + client = discoveryengine_v1.CompletionServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.PurgeSuggestionDenyListEntriesRequest( + parent="parent_value", + ) + + # Make the request + operation = client.purge_suggestion_deny_list_entries(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.PurgeSuggestionDenyListEntriesRequest, dict]]): + The request object. Request message for + [CompletionService.PurgeSuggestionDenyListEntries][google.cloud.discoveryengine.v1.CompletionService.PurgeSuggestionDenyListEntries] + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1.types.PurgeSuggestionDenyListEntriesResponse` Response message for + [CompletionService.PurgeSuggestionDenyListEntries][google.cloud.discoveryengine.v1.CompletionService.PurgeSuggestionDenyListEntries] + method. 
+ + """ + # Create or coerce a protobuf request object. + request = purge_config.PurgeSuggestionDenyListEntriesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.purge_suggestion_deny_list_entries, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + purge_config.PurgeSuggestionDenyListEntriesResponse, + metadata_type=purge_config.PurgeSuggestionDenyListEntriesMetadata, + ) + + # Done; return the response. 
+ return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/client.py index fc4d497d96fd..4f6b146485d8 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/client.py @@ -47,10 +47,16 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore from google.cloud.location import locations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore -from google.cloud.discoveryengine_v1.types import completion_service +from google.cloud.discoveryengine_v1.types import ( + completion_service, + import_config, + purge_config, +) from .transports.base import DEFAULT_CLIENT_INFO, CompletionServiceTransport from .transports.grpc import CompletionServiceGrpcTransport @@ -757,6 +763,225 @@ def sample_complete_query(): # Done; return the response. return response + def import_suggestion_deny_list_entries( + self, + request: Optional[ + Union[import_config.ImportSuggestionDenyListEntriesRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Imports all + [SuggestionDenyListEntry][google.cloud.discoveryengine.v1.SuggestionDenyListEntry] + for a DataStore. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_import_suggestion_deny_list_entries(): + # Create a client + client = discoveryengine_v1.CompletionServiceClient() + + # Initialize request argument(s) + inline_source = discoveryengine_v1.InlineSource() + inline_source.entries.block_phrase = "block_phrase_value" + inline_source.entries.match_operator = "CONTAINS" + + request = discoveryengine_v1.ImportSuggestionDenyListEntriesRequest( + inline_source=inline_source, + parent="parent_value", + ) + + # Make the request + operation = client.import_suggestion_deny_list_entries(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.ImportSuggestionDenyListEntriesRequest, dict]): + The request object. Request message for + [CompletionService.ImportSuggestionDenyListEntries][google.cloud.discoveryengine.v1.CompletionService.ImportSuggestionDenyListEntries] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1.types.ImportSuggestionDenyListEntriesResponse` Response message for + [CompletionService.ImportSuggestionDenyListEntries][google.cloud.discoveryengine.v1.CompletionService.ImportSuggestionDenyListEntries] + method. 
+ + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a import_config.ImportSuggestionDenyListEntriesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, import_config.ImportSuggestionDenyListEntriesRequest + ): + request = import_config.ImportSuggestionDenyListEntriesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.import_suggestion_deny_list_entries + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + import_config.ImportSuggestionDenyListEntriesResponse, + metadata_type=import_config.ImportSuggestionDenyListEntriesMetadata, + ) + + # Done; return the response. + return response + + def purge_suggestion_deny_list_entries( + self, + request: Optional[ + Union[purge_config.PurgeSuggestionDenyListEntriesRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Permanently deletes all + [SuggestionDenyListEntry][google.cloud.discoveryengine.v1.SuggestionDenyListEntry] + for a DataStore. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_purge_suggestion_deny_list_entries(): + # Create a client + client = discoveryengine_v1.CompletionServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.PurgeSuggestionDenyListEntriesRequest( + parent="parent_value", + ) + + # Make the request + operation = client.purge_suggestion_deny_list_entries(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.PurgeSuggestionDenyListEntriesRequest, dict]): + The request object. Request message for + [CompletionService.PurgeSuggestionDenyListEntries][google.cloud.discoveryengine.v1.CompletionService.PurgeSuggestionDenyListEntries] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1.types.PurgeSuggestionDenyListEntriesResponse` Response message for + [CompletionService.PurgeSuggestionDenyListEntries][google.cloud.discoveryengine.v1.CompletionService.PurgeSuggestionDenyListEntries] + method. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a purge_config.PurgeSuggestionDenyListEntriesRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, purge_config.PurgeSuggestionDenyListEntriesRequest): + request = purge_config.PurgeSuggestionDenyListEntriesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.purge_suggestion_deny_list_entries + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + purge_config.PurgeSuggestionDenyListEntriesResponse, + metadata_type=purge_config.PurgeSuggestionDenyListEntriesMetadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "CompletionServiceClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/base.py index be1a4caf81fa..9a37df64f753 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/base.py @@ -18,7 +18,7 @@ import google.api_core from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, operations_v1 from google.api_core import retry as retries import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore @@ -27,7 +27,11 @@ from google.oauth2 import service_account # type: ignore from google.cloud.discoveryengine_v1 import gapic_version as package_version -from google.cloud.discoveryengine_v1.types import completion_service +from google.cloud.discoveryengine_v1.types import ( + completion_service, + import_config, + purge_config, +) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -133,6 +137,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.import_suggestion_deny_list_entries: gapic_v1.method.wrap_method( + self.import_suggestion_deny_list_entries, + default_timeout=None, + client_info=client_info, + ), + self.purge_suggestion_deny_list_entries: gapic_v1.method.wrap_method( + self.purge_suggestion_deny_list_entries, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -144,6 +158,11 @@ def close(self): """ raise NotImplementedError() + @property + def operations_client(self): + """Return the 
client designed to process long-running operations.""" + raise NotImplementedError() + @property def complete_query( self, @@ -156,6 +175,24 @@ def complete_query( ]: raise NotImplementedError() + @property + def import_suggestion_deny_list_entries( + self, + ) -> Callable[ + [import_config.ImportSuggestionDenyListEntriesRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def purge_suggestion_deny_list_entries( + self, + ) -> Callable[ + [purge_config.PurgeSuggestionDenyListEntriesRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/grpc.py index d03aeff728bb..5635ab4e7a83 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/grpc.py @@ -16,7 +16,7 @@ from typing import Callable, Dict, Optional, Sequence, Tuple, Union import warnings -from google.api_core import gapic_v1, grpc_helpers +from google.api_core import gapic_v1, grpc_helpers, operations_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -24,7 +24,11 @@ from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore -from google.cloud.discoveryengine_v1.types import completion_service +from google.cloud.discoveryengine_v1.types import ( + completion_service, + import_config, + purge_config, +) from .base import DEFAULT_CLIENT_INFO, 
CompletionServiceTransport @@ -112,6 +116,7 @@ def __init__( self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -230,6 +235,20 @@ def grpc_channel(self) -> grpc.Channel: """Return the channel designed to connect to this service.""" return self._grpc_channel + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + @property def complete_query( self, @@ -260,6 +279,72 @@ def complete_query( ) return self._stubs["complete_query"] + @property + def import_suggestion_deny_list_entries( + self, + ) -> Callable[ + [import_config.ImportSuggestionDenyListEntriesRequest], operations_pb2.Operation + ]: + r"""Return a callable for the import suggestion deny list + entries method over gRPC. + + Imports all + [SuggestionDenyListEntry][google.cloud.discoveryengine.v1.SuggestionDenyListEntry] + for a DataStore. + + Returns: + Callable[[~.ImportSuggestionDenyListEntriesRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "import_suggestion_deny_list_entries" not in self._stubs: + self._stubs[ + "import_suggestion_deny_list_entries" + ] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.CompletionService/ImportSuggestionDenyListEntries", + request_serializer=import_config.ImportSuggestionDenyListEntriesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["import_suggestion_deny_list_entries"] + + @property + def purge_suggestion_deny_list_entries( + self, + ) -> Callable[ + [purge_config.PurgeSuggestionDenyListEntriesRequest], operations_pb2.Operation + ]: + r"""Return a callable for the purge suggestion deny list + entries method over gRPC. + + Permanently deletes all + [SuggestionDenyListEntry][google.cloud.discoveryengine.v1.SuggestionDenyListEntry] + for a DataStore. + + Returns: + Callable[[~.PurgeSuggestionDenyListEntriesRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "purge_suggestion_deny_list_entries" not in self._stubs: + self._stubs[ + "purge_suggestion_deny_list_entries" + ] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.CompletionService/PurgeSuggestionDenyListEntries", + request_serializer=purge_config.PurgeSuggestionDenyListEntriesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["purge_suggestion_deny_list_entries"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/grpc_asyncio.py index 40a0ac2a07e4..dd6af86c8223 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/grpc_asyncio.py @@ -16,7 +16,7 @@ from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union import warnings -from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.cloud.location import locations_pb2 # type: ignore @@ -24,7 +24,11 @@ import grpc # type: ignore from grpc.experimental import aio # type: ignore -from google.cloud.discoveryengine_v1.types import completion_service +from google.cloud.discoveryengine_v1.types import ( + completion_service, + import_config, + purge_config, +) from .base import DEFAULT_CLIENT_INFO, CompletionServiceTransport from .grpc import CompletionServiceGrpcTransport @@ -158,6 +162,7 @@ def __init__( self._grpc_channel = None self._ssl_channel_credentials = 
ssl_channel_credentials self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -233,6 +238,22 @@ def grpc_channel(self) -> aio.Channel: # Return the channel from cache. return self._grpc_channel + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + @property def complete_query( self, @@ -263,6 +284,74 @@ def complete_query( ) return self._stubs["complete_query"] + @property + def import_suggestion_deny_list_entries( + self, + ) -> Callable[ + [import_config.ImportSuggestionDenyListEntriesRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the import suggestion deny list + entries method over gRPC. + + Imports all + [SuggestionDenyListEntry][google.cloud.discoveryengine.v1.SuggestionDenyListEntry] + for a DataStore. + + Returns: + Callable[[~.ImportSuggestionDenyListEntriesRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "import_suggestion_deny_list_entries" not in self._stubs: + self._stubs[ + "import_suggestion_deny_list_entries" + ] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.CompletionService/ImportSuggestionDenyListEntries", + request_serializer=import_config.ImportSuggestionDenyListEntriesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["import_suggestion_deny_list_entries"] + + @property + def purge_suggestion_deny_list_entries( + self, + ) -> Callable[ + [purge_config.PurgeSuggestionDenyListEntriesRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the purge suggestion deny list + entries method over gRPC. + + Permanently deletes all + [SuggestionDenyListEntry][google.cloud.discoveryengine.v1.SuggestionDenyListEntry] + for a DataStore. + + Returns: + Callable[[~.PurgeSuggestionDenyListEntriesRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "purge_suggestion_deny_list_entries" not in self._stubs: + self._stubs[ + "purge_suggestion_deny_list_entries" + ] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.CompletionService/PurgeSuggestionDenyListEntries", + request_serializer=purge_config.PurgeSuggestionDenyListEntriesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["purge_suggestion_deny_list_entries"] + def close(self): return self.grpc_channel.close() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/rest.py index 473e5905ad40..77d8632c66d7 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/rest.py @@ -20,7 +20,13 @@ from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings -from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore @@ -39,7 +45,11 @@ from google.longrunning import operations_pb2 # type: ignore -from google.cloud.discoveryengine_v1.types import completion_service +from google.cloud.discoveryengine_v1.types import ( + completion_service, + import_config, + purge_config, +) from .base import CompletionServiceTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO @@ -74,6 +84,22 @@ def post_complete_query(self, response): logging.log(f"Received response: {response}") 
return response + def pre_import_suggestion_deny_list_entries(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_import_suggestion_deny_list_entries(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_purge_suggestion_deny_list_entries(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_purge_suggestion_deny_list_entries(self, response): + logging.log(f"Received response: {response}") + return response + transport = CompletionServiceRestTransport(interceptor=MyCustomCompletionServiceInterceptor()) client = CompletionServiceClient(transport=transport) @@ -103,6 +129,56 @@ def post_complete_query( """ return response + def pre_import_suggestion_deny_list_entries( + self, + request: import_config.ImportSuggestionDenyListEntriesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + import_config.ImportSuggestionDenyListEntriesRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for import_suggestion_deny_list_entries + + Override in a subclass to manipulate the request or metadata + before they are sent to the CompletionService server. + """ + return request, metadata + + def post_import_suggestion_deny_list_entries( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for import_suggestion_deny_list_entries + + Override in a subclass to manipulate the response + after it is returned by the CompletionService server but before + it is returned to user code. 
+ """ + return response + + def pre_purge_suggestion_deny_list_entries( + self, + request: purge_config.PurgeSuggestionDenyListEntriesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + purge_config.PurgeSuggestionDenyListEntriesRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for purge_suggestion_deny_list_entries + + Override in a subclass to manipulate the request or metadata + before they are sent to the CompletionService server. + """ + return request, metadata + + def post_purge_suggestion_deny_list_entries( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for purge_suggestion_deny_list_entries + + Override in a subclass to manipulate the response + after it is returned by the CompletionService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -241,11 +317,164 @@ def __init__( self._session = AuthorizedSession( self._credentials, default_host=self.DEFAULT_HOST ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None if client_cert_source_for_mtls: self._session.configure_mtls_channel(client_cert_source_for_mtls) self._interceptor = interceptor or CompletionServiceRestInterceptor() self._prep_wrapped_messages(client_info) + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": 
"/v1/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. 
    class _ImportSuggestionDenyListEntries(CompletionServiceRestStub):
        # REST stub that serializes ImportSuggestionDenyListEntriesRequest,
        # sends it over HTTP and parses the long-running Operation response.
        def __hash__(self):
            return hash("ImportSuggestionDenyListEntries")

        # Default values for required query params; empty for this RPC.
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            # Keep only the defaults for required fields the caller left unset.
            return {
                k: v
                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
                if k not in message_dict
            }

        def __call__(
            self,
            request: import_config.ImportSuggestionDenyListEntriesRequest,
            *,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Optional[float] = None,
            metadata: Sequence[Tuple[str, str]] = (),
        ) -> operations_pb2.Operation:
            r"""Call the import suggestion deny list entries method over HTTP.

            Args:
                request (~.import_config.ImportSuggestionDenyListEntriesRequest):
                    The request object. Request message for
                    [CompletionService.ImportSuggestionDenyListEntries][google.cloud.discoveryengine.v1.CompletionService.ImportSuggestionDenyListEntries]
                    method.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.operations_pb2.Operation:
                    This resource represents a
                    long-running operation that is the
                    result of a network API call.
            """

            # HTTP bindings for this RPC: one per parent-resource pattern
            # (with and without the collections/* segment).
            http_options: List[Dict[str, str]] = [
                {
                    "method": "post",
                    "uri": "/v1/{parent=projects/*/locations/*/collections/*/dataStores/*}/suggestionDenyListEntries:import",
                    "body": "*",
                },
                {
                    "method": "post",
                    "uri": "/v1/{parent=projects/*/locations/*/dataStores/*}/suggestionDenyListEntries:import",
                    "body": "*",
                },
            ]
            # Give the interceptor a chance to rewrite request/metadata first.
            (
                request,
                metadata,
            ) = self._interceptor.pre_import_suggestion_deny_list_entries(
                request, metadata
            )
            pb_request = import_config.ImportSuggestionDenyListEntriesRequest.pb(
                request
            )
            # Match the request against the bindings above to produce the
            # concrete URI, HTTP method, body and query params.
            transcoded_request = path_template.transcode(http_options, pb_request)

            # Jsonify the request body
            body = json_format.MessageToJson(
                transcoded_request["body"],
                including_default_value_fields=False,
                use_integers_for_enums=True,
            )
            uri = transcoded_request["uri"]
            method = transcoded_request["method"]

            # Jsonify the query params
            query_params = json.loads(
                json_format.MessageToJson(
                    transcoded_request["query_params"],
                    including_default_value_fields=False,
                    use_integers_for_enums=True,
                )
            )
            query_params.update(self._get_unset_required_fields(query_params))

            # Ask the server for JSON with integer-encoded enums.
            query_params["$alt"] = "json;enum-encoding=int"

            # Send the request
            headers = dict(metadata)
            headers["Content-Type"] = "application/json"
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                data=body,
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Parse the wire response into an Operation proto and let the
            # interceptor post-process it before returning to user code.
            resp = operations_pb2.Operation()
            json_format.Parse(response.content, resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_import_suggestion_deny_list_entries(resp)
            return resp
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/collections/*/dataStores/*}/suggestionDenyListEntries:purge", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/dataStores/**}/suggestionDenyListEntries:purge", + "body": "*", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_purge_suggestion_deny_list_entries( + request, metadata + ) + pb_request = purge_config.PurgeSuggestionDenyListEntriesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_purge_suggestion_deny_list_entries(resp) + return resp + @property def complete_query( self, @@ -354,6 +803,26 @@ def complete_query( # In C++ this would require a dynamic_cast return self._CompleteQuery(self._session, self._host, self._interceptor) # type: ignore + @property + def import_suggestion_deny_list_entries( + self, + ) -> Callable[ + [import_config.ImportSuggestionDenyListEntriesRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ImportSuggestionDenyListEntries(self._session, self._host, self._interceptor) # type: ignore + + @property + def purge_suggestion_deny_list_entries( + self, + ) -> Callable[ + [purge_config.PurgeSuggestionDenyListEntriesRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._PurgeSuggestionDenyListEntries(self._session, self._host, self._interceptor) # type: ignore + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore @@ -387,6 +856,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", @@ -505,6 +978,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/async_client.py index 258e252faf07..20f35d391c78 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/async_client.py @@ -692,7 +692,7 @@ async def sample_update_conversation(): [Conversation][google.cloud.discoveryengine.v1.Conversation] to update. The following are NOT supported: - - [conversation.name][] + - [Conversation.name][google.cloud.discoveryengine.v1.Conversation.name] If not set or empty, all supported fields are updated. 
diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/client.py index c0a0af633dfe..e4a18f5d224b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/client.py @@ -1166,7 +1166,7 @@ def sample_update_conversation(): [Conversation][google.cloud.discoveryengine.v1.Conversation] to update. The following are NOT supported: - - [conversation.name][] + - [Conversation.name][google.cloud.discoveryengine.v1.Conversation.name] If not set or empty, all supported fields are updated. diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/transports/rest.py index dea9df990d58..d5c8c46f50a5 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/transports/rest.py @@ -460,6 +460,11 @@ def __call__( "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/conversations/*}:converse", "body": "*", }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*/conversations/*}:converse", + "body": "*", + }, ] request, metadata = self._interceptor.pre_converse_conversation( request, metadata @@ -569,6 +574,11 @@ def __call__( "uri": "/v1/{parent=projects/*/locations/*/collections/*/dataStores/*}/conversations", "body": "conversation", }, + { + "method": "post", + "uri": 
"/v1/{parent=projects/*/locations/*/collections/*/engines/*}/conversations", + "body": "conversation", + }, ] request, metadata = self._interceptor.pre_create_conversation( request, metadata @@ -668,6 +678,10 @@ def __call__( "method": "delete", "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/conversations/*}", }, + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*/conversations/*}", + }, ] request, metadata = self._interceptor.pre_delete_conversation( request, metadata @@ -756,6 +770,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/conversations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*/conversations/*}", + }, ] request, metadata = self._interceptor.pre_get_conversation( request, metadata @@ -852,6 +870,10 @@ def __call__( "method": "get", "uri": "/v1/{parent=projects/*/locations/*/collections/*/dataStores/*}/conversations", }, + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/collections/*/engines/*}/conversations", + }, ] request, metadata = self._interceptor.pre_list_conversations( request, metadata @@ -951,6 +973,11 @@ def __call__( "uri": "/v1/{conversation.name=projects/*/locations/*/collections/*/dataStores/*/conversations/*}", "body": "conversation", }, + { + "method": "patch", + "uri": "/v1/{conversation.name=projects/*/locations/*/collections/*/engines/*/conversations/*}", + "body": "conversation", + }, ] request, metadata = self._interceptor.pre_update_conversation( request, metadata @@ -1104,6 +1131,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", @@ -1222,6 +1253,10 @@ def __call__( "method": "get", 
"uri": "/v1/{name=projects/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/__init__.py new file mode 100644 index 000000000000..ff4ce7fbfad6 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#
# Public surface of the data_store_service subpackage: re-export the sync
# and async clients so callers can import them from the package root.
from .async_client import DataStoreServiceAsyncClient
from .client import DataStoreServiceClient

__all__ = (
    "DataStoreServiceClient",
    "DataStoreServiceAsyncClient",
)
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.discoveryengine_v1.services.data_store_service import pagers +from google.cloud.discoveryengine_v1.types import common +from google.cloud.discoveryengine_v1.types import data_store +from google.cloud.discoveryengine_v1.types import data_store as gcd_data_store +from google.cloud.discoveryengine_v1.types import data_store_service + +from .client import DataStoreServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, DataStoreServiceTransport +from .transports.grpc_asyncio import DataStoreServiceGrpcAsyncIOTransport + + +class DataStoreServiceAsyncClient: + """Service for managing + [DataStore][google.cloud.discoveryengine.v1.DataStore] + configuration. 
+ """ + + _client: DataStoreServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = DataStoreServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DataStoreServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = DataStoreServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = DataStoreServiceClient._DEFAULT_UNIVERSE + + collection_path = staticmethod(DataStoreServiceClient.collection_path) + parse_collection_path = staticmethod(DataStoreServiceClient.parse_collection_path) + data_store_path = staticmethod(DataStoreServiceClient.data_store_path) + parse_data_store_path = staticmethod(DataStoreServiceClient.parse_data_store_path) + common_billing_account_path = staticmethod( + DataStoreServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + DataStoreServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(DataStoreServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + DataStoreServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + DataStoreServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + DataStoreServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(DataStoreServiceClient.common_project_path) + parse_common_project_path = staticmethod( + DataStoreServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(DataStoreServiceClient.common_location_path) + parse_common_location_path = staticmethod( + DataStoreServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. 
    @classmethod
    def from_service_account_info(cls, info: dict, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
        info.

        Args:
            info (dict): The service account private key info.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            DataStoreServiceAsyncClient: The constructed client.
        """
        # Delegate to the sync client's classmethod, rebinding it to this
        # async class so the constructed instance is async.
        return DataStoreServiceClient.from_service_account_info.__func__(DataStoreServiceAsyncClient, info, *args, **kwargs)  # type: ignore

    @classmethod
    def from_service_account_file(cls, filename: str, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
        file.

        Args:
            filename (str): The path to the service account private key json
                file.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            DataStoreServiceAsyncClient: The constructed client.
        """
        # Same delegation pattern as from_service_account_info.
        return DataStoreServiceClient.from_service_account_file.__func__(DataStoreServiceAsyncClient, filename, *args, **kwargs)  # type: ignore

    # Alias kept for backward compatibility with older client surfaces.
    from_service_account_json = from_service_account_file

    @classmethod
    def get_mtls_endpoint_and_cert_source(
        cls, client_options: Optional[ClientOptions] = None
    ):
        """Return the API endpoint and client cert source for mutual TLS.

        The client cert source is determined in the following order:
        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
        client cert source is None.
        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
        default client cert source exists, use the default one; otherwise the client cert
        source is None.

        The API endpoint is determined in the following order:
        (1) if `client_options.api_endpoint` is provided, use the provided one.
        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
        default mTLS endpoint; if the environment variable is "never", use the default API
        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
        use the default API endpoint.

        More details can be found at https://google.aip.dev/auth/4114.

        Args:
            client_options (google.api_core.client_options.ClientOptions): Custom options for the
                client. Only the `api_endpoint` and `client_cert_source` properties may be used
                in this method.

        Returns:
            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
            client cert source to use.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
        """
        # The resolution logic lives on the synchronous client.
        return DataStoreServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore

    @property
    def transport(self) -> DataStoreServiceTransport:
        """Returns the transport used by the client instance.

        Returns:
            DataStoreServiceTransport: The transport used by the client instance.
        """
        return self._client.transport

    @property
    def api_endpoint(self):
        """Return the API endpoint used by the client instance.

        Returns:
            str: The API endpoint used by the client instance.
        """
        return self._client._api_endpoint

    @property
    def universe_domain(self) -> str:
        """Return the universe domain used by the client instance.

        Returns:
            str: The universe domain used
            by the client instance.
        """
        return self._client._universe_domain

    # Resolve transport classes through the sync client's classmethod.
    get_transport_class = functools.partial(
        type(DataStoreServiceClient).get_transport_class, type(DataStoreServiceClient)
    )
    def __init__(
        self,
        *,
        credentials: Optional[ga_credentials.Credentials] = None,
        transport: Union[str, DataStoreServiceTransport] = "grpc_asyncio",
        client_options: Optional[ClientOptions] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
    ) -> None:
        """Instantiates the data store service async client.

        Args:
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            transport (Union[str, ~.DataStoreServiceTransport]): The
                transport to use. If set to None, a transport is chosen
                automatically.
            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
                Custom options for the client.

                1. The ``api_endpoint`` property can be used to override the
                default endpoint provided by the client when ``transport`` is
                not explicitly provided. Only if this property is not set and
                ``transport`` was not explicitly provided, the endpoint is
                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
                variable, which can have one of the following values:
                "always" (always use the default mTLS endpoint), "never" (always
                use the default regular endpoint) and "auto" (auto-switch to the
                default mTLS endpoint if client certificate is present; this is
                the default value).

                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
                is "true", then the ``client_cert_source`` property can be used
                to provide a client certificate for mTLS transport. If
                not provided, the default SSL client certificate will be used if
                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
                set, no client certificate will be used.

                3. The ``universe_domain`` property can be used to override the
                default "googleapis.com" universe. Note that ``api_endpoint``
                property still takes precedence; and ``universe_domain`` is
                currently not supported for mTLS.

            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.

        Raises:
            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
                creation failed for any reason.
        """
        # All behavior is delegated to the synchronous client; this wrapper
        # only adapts its surface to async callers.
        self._client = DataStoreServiceClient(
            credentials=credentials,
            transport=transport,
            client_options=client_options,
            client_info=client_info,
        )
    async def create_data_store(
        self,
        request: Optional[
            Union[data_store_service.CreateDataStoreRequest, dict]
        ] = None,
        *,
        parent: Optional[str] = None,
        data_store: Optional[gcd_data_store.DataStore] = None,
        data_store_id: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operation_async.AsyncOperation:
        r"""Creates a
        [DataStore][google.cloud.discoveryengine.v1.DataStore].

        DataStore is for storing
        [Documents][google.cloud.discoveryengine.v1.Document]. To serve
        these documents for Search, or Recommendation use case, an
        [Engine][google.cloud.discoveryengine.v1.Engine] needs to be
        created separately.

        Args:
            request (Optional[Union[google.cloud.discoveryengine_v1.types.CreateDataStoreRequest, dict]]):
                The request object. Request for
                [DataStoreService.CreateDataStore][google.cloud.discoveryengine.v1.DataStoreService.CreateDataStore]
                method.
            parent (:class:`str`):
                Required. The parent resource name, such as
                ``projects/{project}/locations/{location}/collections/{collection}``.

                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            data_store (:class:`google.cloud.discoveryengine_v1.types.DataStore`):
                Required. The
                [DataStore][google.cloud.discoveryengine.v1.DataStore]
                to create.

                This corresponds to the ``data_store`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            data_store_id (:class:`str`):
                Required. The ID to use for the DataStore, which will become
                the final component of the DataStore's resource name. Must
                conform to RFC-1034 with a length limit of 63 characters;
                otherwise an INVALID_ARGUMENT error is returned.

                This corresponds to the ``data_store_id`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation. The result
                type for the operation will be
                :class:`google.cloud.discoveryengine_v1.types.DataStore`.
        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([parent, data_store, data_store_id])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        request = data_store_service.CreateDataStoreRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if parent is not None:
            request.parent = parent
        if data_store is not None:
            request.data_store = data_store
        if data_store_id is not None:
            request.data_store_id = data_store_id

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.create_data_store,
            default_timeout=None,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here (routing by the request's parent resource).
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
        )

        # Validate the universe domain.
        self._client._validate_universe_domain()

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the response in an operation future so callers can await the
        # final DataStore result and observe CreateDataStoreMetadata.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            gcd_data_store.DataStore,
            metadata_type=data_store_service.CreateDataStoreMetadata,
        )

        # Done; return the response.
        return response
Request message for + [DataStoreService.GetDataStore][google.cloud.discoveryengine.v1.DataStoreService.GetDataStore] + method. + name (:class:`str`): + Required. Full resource name of + [DataStore][google.cloud.discoveryengine.v1.DataStore], + such as + ``projects/{project}/locations/{location}/collections/{collection_id}/dataStores/{data_store_id}``. + + If the caller does not have permission to access the + [DataStore][google.cloud.discoveryengine.v1.DataStore], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the requested + [DataStore][google.cloud.discoveryengine.v1.DataStore] + does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.types.DataStore: + DataStore captures global settings + and configs at the DataStore level. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_store_service.GetDataStoreRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_data_store, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_data_stores( + self, + request: Optional[Union[data_store_service.ListDataStoresRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDataStoresAsyncPager: + r"""Lists all the + [DataStore][google.cloud.discoveryengine.v1.DataStore]s + associated with the project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_list_data_stores(): + # Create a client + client = discoveryengine_v1.DataStoreServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ListDataStoresRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_stores(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.ListDataStoresRequest, dict]]): + The request object. Request message for + [DataStoreService.ListDataStores][google.cloud.discoveryengine.v1.DataStoreService.ListDataStores] + method. + parent (:class:`str`): + Required. The parent branch resource name, such as + ``projects/{project}/locations/{location}/collections/{collection_id}``. + + If the caller does not have permission to list + [DataStore][google.cloud.discoveryengine.v1.DataStore]s + under this location, regardless of whether or not this + data store exists, a PERMISSION_DENIED error is + returned. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.services.data_store_service.pagers.ListDataStoresAsyncPager: + Response message for + [DataStoreService.ListDataStores][google.cloud.discoveryengine.v1.DataStoreService.ListDataStores] + method. + + Iterating over this object will yield results and + resolve additional pages automatically. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_store_service.ListDataStoresRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_data_stores, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDataStoresAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_data_store( + self, + request: Optional[ + Union[data_store_service.DeleteDataStoreRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a + [DataStore][google.cloud.discoveryengine.v1.DataStore]. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_delete_data_store(): + # Create a client + client = discoveryengine_v1.DataStoreServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.DeleteDataStoreRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_data_store(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.DeleteDataStoreRequest, dict]]): + The request object. Request message for + [DataStoreService.DeleteDataStore][google.cloud.discoveryengine.v1.DataStoreService.DeleteDataStore] + method. + name (:class:`str`): + Required. Full resource name of + [DataStore][google.cloud.discoveryengine.v1.DataStore], + such as + ``projects/{project}/locations/{location}/collections/{collection_id}/dataStores/{data_store_id}``. + + If the caller does not have permission to delete the + [DataStore][google.cloud.discoveryengine.v1.DataStore], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the + [DataStore][google.cloud.discoveryengine.v1.DataStore] + to delete does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_store_service.DeleteDataStoreRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_data_store, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=data_store_service.DeleteDataStoreMetadata, + ) + + # Done; return the response. + return response + + async def update_data_store( + self, + request: Optional[ + Union[data_store_service.UpdateDataStoreRequest, dict] + ] = None, + *, + data_store: Optional[gcd_data_store.DataStore] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_data_store.DataStore: + r"""Updates a [DataStore][google.cloud.discoveryengine.v1.DataStore] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_update_data_store(): + # Create a client + client = discoveryengine_v1.DataStoreServiceAsyncClient() + + # Initialize request argument(s) + data_store = discoveryengine_v1.DataStore() + data_store.display_name = "display_name_value" + + request = discoveryengine_v1.UpdateDataStoreRequest( + data_store=data_store, + ) + + # Make the request + response = await client.update_data_store(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.UpdateDataStoreRequest, dict]]): + The request object. Request message for + [DataStoreService.UpdateDataStore][google.cloud.discoveryengine.v1.DataStoreService.UpdateDataStore] + method. 
+ data_store (:class:`google.cloud.discoveryengine_v1.types.DataStore`): + Required. The + [DataStore][google.cloud.discoveryengine.v1.DataStore] + to update. + + If the caller does not have permission to update the + [DataStore][google.cloud.discoveryengine.v1.DataStore], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the + [DataStore][google.cloud.discoveryengine.v1.DataStore] + to update does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``data_store`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Indicates which fields in the provided + [DataStore][google.cloud.discoveryengine.v1.DataStore] + to update. + + If an unsupported or unknown field is provided, an + INVALID_ARGUMENT error is returned. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.types.DataStore: + DataStore captures global settings + and configs at the DataStore level. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([data_store, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_store_service.UpdateDataStoreRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if data_store is not None: + request.data_store = data_store + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_data_store, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_store.name", request.data_store.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "DataStoreServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("DataStoreServiceAsyncClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/client.py new file mode 100644 index 000000000000..b378b2b3bc68 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/client.py @@ -0,0 +1,1528 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.discoveryengine_v1.services.data_store_service import pagers +from google.cloud.discoveryengine_v1.types import common +from google.cloud.discoveryengine_v1.types import data_store +from google.cloud.discoveryengine_v1.types import data_store as gcd_data_store +from google.cloud.discoveryengine_v1.types import data_store_service + +from .transports.base import DEFAULT_CLIENT_INFO, DataStoreServiceTransport +from .transports.grpc import DataStoreServiceGrpcTransport +from .transports.grpc_asyncio import 
DataStoreServiceGrpcAsyncIOTransport +from .transports.rest import DataStoreServiceRestTransport + + +class DataStoreServiceClientMeta(type): + """Metaclass for the DataStoreService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[DataStoreServiceTransport]] + _transport_registry["grpc"] = DataStoreServiceGrpcTransport + _transport_registry["grpc_asyncio"] = DataStoreServiceGrpcAsyncIOTransport + _transport_registry["rest"] = DataStoreServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[DataStoreServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class DataStoreServiceClient(metaclass=DataStoreServiceClientMeta): + """Service for managing + [DataStore][google.cloud.discoveryengine.v1.DataStore] + configuration. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "discoveryengine.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "discoveryengine.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataStoreServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataStoreServiceClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> DataStoreServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DataStoreServiceTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def collection_path( + project: str, + location: str, + collection: str, + ) -> str: + """Returns a fully-qualified collection string.""" + return ( + "projects/{project}/locations/{location}/collections/{collection}".format( + project=project, + location=location, + collection=collection, + ) + ) + + @staticmethod + def parse_collection_path(path: str) -> Dict[str, str]: + """Parses a collection path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/collections/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def data_store_path( + project: str, + location: str, + data_store: str, + ) -> str: + """Returns a fully-qualified data_store string.""" + return "projects/{project}/locations/{location}/dataStores/{data_store}".format( + project=project, + location=location, + data_store=data_store, + ) + + @staticmethod + def parse_data_store_path(path: str) -> Dict[str, str]: + """Parses a data_store path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/dataStores/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component 
segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. 
    @classmethod
    def get_mtls_endpoint_and_cert_source(
        cls, client_options: Optional[client_options_lib.ClientOptions] = None
    ):
        """Deprecated. Return the API endpoint and client cert source for mutual TLS.

        The client cert source is determined in the following order:
        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
        client cert source is None.
        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
        default client cert source exists, use the default one; otherwise the client cert
        source is None.

        The API endpoint is determined in the following order:
        (1) if `client_options.api_endpoint` is provided, use the provided one.
        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
        default mTLS endpoint; if the environment variable is "never", use the default API
        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
        use the default API endpoint.

        More details can be found at https://google.aip.dev/auth/4114.

        Args:
            client_options (google.api_core.client_options.ClientOptions): Custom options for the
                client. Only the `api_endpoint` and `client_cert_source` properties may be used
                in this method.

        Returns:
            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
                client cert source to use.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
        """

        warnings.warn(
            "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
            DeprecationWarning,
        )
        if client_options is None:
            client_options = client_options_lib.ClientOptions()
        # NOTE: unlike _read_environment_variables(), this deprecated path does
        # not lower-case the env values, so e.g. "True" is rejected here.
        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
        if use_client_cert not in ("true", "false"):
            raise ValueError(
                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
            )
        if use_mtls_endpoint not in ("auto", "never", "always"):
            raise MutualTLSChannelError(
                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
            )

        # Figure out the client cert source to use.
        client_cert_source = None
        if use_client_cert == "true":
            # An explicitly provided cert source wins over auto-discovery.
            if client_options.client_cert_source:
                client_cert_source = client_options.client_cert_source
            elif mtls.has_default_client_cert_source():
                client_cert_source = mtls.default_client_cert_source()

        # Figure out which api endpoint to use.
        if client_options.api_endpoint is not None:
            api_endpoint = client_options.api_endpoint
        elif use_mtls_endpoint == "always" or (
            use_mtls_endpoint == "auto" and client_cert_source
        ):
            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
        else:
            api_endpoint = cls.DEFAULT_ENDPOINT

        return api_endpoint, client_cert_source
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". 
    @staticmethod
    def _get_api_endpoint(
        api_override, client_cert_source, universe_domain, use_mtls_endpoint
    ):
        """Return the API endpoint used by the client.

        Args:
            api_override (str): The API endpoint override. If specified, this is always
                the return value of this function and the other arguments are not used.
            client_cert_source (bytes): The client certificate source used by the client.
            universe_domain (str): The universe domain used by the client.
            use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters.
                Possible values are "always", "auto", or "never".

        Returns:
            str: The API endpoint to be used by the client.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If the mTLS endpoint
                is selected (explicitly or via auto-detection) outside the
                default universe.
        """
        if api_override is not None:
            api_endpoint = api_override
        elif use_mtls_endpoint == "always" or (
            use_mtls_endpoint == "auto" and client_cert_source
        ):
            _default_universe = DataStoreServiceClient._DEFAULT_UNIVERSE
            # mTLS endpoints only exist for the default (googleapis.com)
            # universe.
            if universe_domain != _default_universe:
                raise MutualTLSChannelError(
                    f"mTLS is not supported in any universe other than {_default_universe}."
                )
            api_endpoint = DataStoreServiceClient.DEFAULT_MTLS_ENDPOINT
        else:
            api_endpoint = DataStoreServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(
                UNIVERSE_DOMAIN=universe_domain
            )
        return api_endpoint

    @staticmethod
    def _get_universe_domain(
        client_universe_domain: Optional[str], universe_domain_env: Optional[str]
    ) -> str:
        """Return the universe domain used by the client.

        Args:
            client_universe_domain (Optional[str]): The universe domain configured via the client options.
            universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable.

        Returns:
            str: The universe domain to be used by the client.

        Raises:
            ValueError: If the universe domain is an empty string.
        """
        # Precedence: explicit client option > environment variable > default.
        universe_domain = DataStoreServiceClient._DEFAULT_UNIVERSE
        if client_universe_domain is not None:
            universe_domain = client_universe_domain
        elif universe_domain_env is not None:
            universe_domain = universe_domain_env
        if len(universe_domain.strip()) == 0:
            raise ValueError("Universe Domain cannot be an empty string.")
        return universe_domain
    @staticmethod
    def _compare_universes(
        client_universe: str, credentials: ga_credentials.Credentials
    ) -> bool:
        """Returns True iff the universe domains used by the client and credentials match.

        Args:
            client_universe (str): The universe domain configured via the client options.
            credentials (ga_credentials.Credentials): The credentials being used in the client.

        Returns:
            bool: True iff client_universe matches the universe in credentials.

        Raises:
            ValueError: when client_universe does not match the universe in credentials.
        """

        default_universe = DataStoreServiceClient._DEFAULT_UNIVERSE
        # Older credential types may not expose `universe_domain`; treat them
        # as belonging to the default universe.
        credentials_universe = getattr(credentials, "universe_domain", default_universe)

        if client_universe != credentials_universe:
            raise ValueError(
                "The configured universe domain "
                f"({client_universe}) does not match the universe domain "
                f"found in the credentials ({credentials_universe}). "
                "If you haven't configured the universe domain explicitly, "
                f"`{default_universe}` is the default."
            )
        return True

    def _validate_universe_domain(self):
        """Validates client's and credentials' universe domains are consistent.

        Returns:
            bool: True iff the configured universe domain is valid.

        Raises:
            ValueError: If the configured universe domain is not valid.
        """
        # Short-circuit: once validated, the (possibly raising) comparison is
        # skipped on every subsequent RPC.
        self._is_universe_domain_valid = (
            self._is_universe_domain_valid
            or DataStoreServiceClient._compare_universes(
                self.universe_domain, self.transport._credentials
            )
        )
        return self._is_universe_domain_valid

    @property
    def api_endpoint(self):
        """Return the API endpoint used by the client instance.

        Returns:
            str: The API endpoint used by the client instance.
        """
        return self._api_endpoint

    @property
    def universe_domain(self) -> str:
        """Return the universe domain used by the client instance.

        Returns:
            str: The universe domain used by the client instance.
        """
        return self._universe_domain
    def __init__(
        self,
        *,
        credentials: Optional[ga_credentials.Credentials] = None,
        transport: Optional[Union[str, DataStoreServiceTransport]] = None,
        client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
    ) -> None:
        """Instantiates the data store service client.

        Args:
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            transport (Union[str, DataStoreServiceTransport]): The
                transport to use. If set to None, a transport is chosen
                automatically.
            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
                Custom options for the client.

                1. The ``api_endpoint`` property can be used to override the
                default endpoint provided by the client when ``transport`` is
                not explicitly provided. Only if this property is not set and
                ``transport`` was not explicitly provided, the endpoint is
                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
                variable, which have one of the following values:
                "always" (always use the default mTLS endpoint), "never" (always
                use the default regular endpoint) and "auto" (auto-switch to the
                default mTLS endpoint if client certificate is present; this is
                the default value).

                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
                is "true", then the ``client_cert_source`` property can be used
                to provide a client certificate for mTLS transport. If
                not provided, the default SSL client certificate will be used if
                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
                set, no client certificate will be used.

                3. The ``universe_domain`` property can be used to override the
                default "googleapis.com" universe. Note that the ``api_endpoint``
                property still takes precedence; and ``universe_domain`` is
                currently not supported for mTLS.

            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
        """
        # Normalize client_options: accept a dict, None, or a ClientOptions.
        self._client_options = client_options
        if isinstance(self._client_options, dict):
            self._client_options = client_options_lib.from_dict(self._client_options)
        if self._client_options is None:
            self._client_options = client_options_lib.ClientOptions()
        self._client_options = cast(
            client_options_lib.ClientOptions, self._client_options
        )

        # getattr: older google-api-core ClientOptions may lack
        # `universe_domain`.
        universe_domain_opt = getattr(self._client_options, "universe_domain", None)

        (
            self._use_client_cert,
            self._use_mtls_endpoint,
            self._universe_domain_env,
        ) = DataStoreServiceClient._read_environment_variables()
        self._client_cert_source = DataStoreServiceClient._get_client_cert_source(
            self._client_options.client_cert_source, self._use_client_cert
        )
        self._universe_domain = DataStoreServiceClient._get_universe_domain(
            universe_domain_opt, self._universe_domain_env
        )
        self._api_endpoint = None  # updated below, depending on `transport`

        # Initialize the universe domain validation.
        self._is_universe_domain_valid = False

        api_key_value = getattr(self._client_options, "api_key", None)
        if api_key_value and credentials:
            raise ValueError(
                "client_options.api_key and credentials are mutually exclusive"
            )

        # Save or instantiate the transport.
        # Ordinarily, we provide the transport, but allowing a custom transport
        # instance provides an extensibility point for unusual situations.
        transport_provided = isinstance(transport, DataStoreServiceTransport)
        if transport_provided:
            # transport is a DataStoreServiceTransport instance.
            # A pre-built transport carries its own credentials/scopes, so
            # passing them here too would be ambiguous.
            if credentials or self._client_options.credentials_file or api_key_value:
                raise ValueError(
                    "When providing a transport instance, "
                    "provide its credentials directly."
                )
            if self._client_options.scopes:
                raise ValueError(
                    "When providing a transport instance, provide its scopes "
                    "directly."
                )
            self._transport = cast(DataStoreServiceTransport, transport)
            self._api_endpoint = self._transport.host

        # Endpoint resolution: a provided transport's host wins, then the
        # override/mTLS/universe logic in _get_api_endpoint.
        self._api_endpoint = (
            self._api_endpoint
            or DataStoreServiceClient._get_api_endpoint(
                self._client_options.api_endpoint,
                self._client_cert_source,
                self._universe_domain,
                self._use_mtls_endpoint,
            )
        )

        if not transport_provided:
            import google.auth._default  # type: ignore

            # API-key credentials require a google-auth version that exposes
            # get_api_key_credentials; otherwise the key is silently ignored.
            if api_key_value and hasattr(
                google.auth._default, "get_api_key_credentials"
            ):
                credentials = google.auth._default.get_api_key_credentials(
                    api_key_value
                )

            Transport = type(self).get_transport_class(cast(str, transport))
            self._transport = Transport(
                credentials=credentials,
                credentials_file=self._client_options.credentials_file,
                host=self._api_endpoint,
                scopes=self._client_options.scopes,
                client_cert_source_for_mtls=self._client_cert_source,
                quota_project_id=self._client_options.quota_project_id,
                client_info=client_info,
                always_use_jwt_access=True,
                api_audience=self._client_options.api_audience,
            )
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_create_data_store(): + # Create a client + client = discoveryengine_v1.DataStoreServiceClient() + + # Initialize request argument(s) + data_store = discoveryengine_v1.DataStore() + data_store.display_name = "display_name_value" + + request = discoveryengine_v1.CreateDataStoreRequest( + parent="parent_value", + data_store=data_store, + data_store_id="data_store_id_value", + ) + + # Make the request + operation = client.create_data_store(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.CreateDataStoreRequest, dict]): + The request object. Request for + [DataStoreService.CreateDataStore][google.cloud.discoveryengine.v1.DataStoreService.CreateDataStore] + method. + parent (str): + Required. The parent resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_store (google.cloud.discoveryengine_v1.types.DataStore): + Required. The + [DataStore][google.cloud.discoveryengine.v1.DataStore] + to create. + + This corresponds to the ``data_store`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_store_id (str): + Required. 
The ID to use for the + [DataStore][google.cloud.discoveryengine.v1.DataStore], + which will become the final component of the + [DataStore][google.cloud.discoveryengine.v1.DataStore]'s + resource name. + + This field must conform to + `RFC-1034 `__ + standard with a length limit of 63 characters. + Otherwise, an INVALID_ARGUMENT error is returned. + + This corresponds to the ``data_store_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.discoveryengine_v1.types.DataStore` + DataStore captures global settings and configs at the + DataStore level. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, data_store, data_store_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_store_service.CreateDataStoreRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, data_store_service.CreateDataStoreRequest): + request = data_store_service.CreateDataStoreRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if data_store is not None: + request.data_store = data_store + if data_store_id is not None: + request.data_store_id = data_store_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_data_store] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcd_data_store.DataStore, + metadata_type=data_store_service.CreateDataStoreMetadata, + ) + + # Done; return the response. + return response + + def get_data_store( + self, + request: Optional[Union[data_store_service.GetDataStoreRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> data_store.DataStore: + r"""Gets a [DataStore][google.cloud.discoveryengine.v1.DataStore]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_get_data_store(): + # Create a client + client = discoveryengine_v1.DataStoreServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.GetDataStoreRequest( + name="name_value", + ) + + # Make the request + response = client.get_data_store(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.GetDataStoreRequest, dict]): + The request object. Request message for + [DataStoreService.GetDataStore][google.cloud.discoveryengine.v1.DataStoreService.GetDataStore] + method. + name (str): + Required. Full resource name of + [DataStore][google.cloud.discoveryengine.v1.DataStore], + such as + ``projects/{project}/locations/{location}/collections/{collection_id}/dataStores/{data_store_id}``. + + If the caller does not have permission to access the + [DataStore][google.cloud.discoveryengine.v1.DataStore], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the requested + [DataStore][google.cloud.discoveryengine.v1.DataStore] + does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.types.DataStore: + DataStore captures global settings + and configs at the DataStore level. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_store_service.GetDataStoreRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, data_store_service.GetDataStoreRequest): + request = data_store_service.GetDataStoreRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_data_store] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_data_stores( + self, + request: Optional[Union[data_store_service.ListDataStoresRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDataStoresPager: + r"""Lists all the + [DataStore][google.cloud.discoveryengine.v1.DataStore]s + associated with the project. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_list_data_stores(): + # Create a client + client = discoveryengine_v1.DataStoreServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ListDataStoresRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_stores(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.ListDataStoresRequest, dict]): + The request object. Request message for + [DataStoreService.ListDataStores][google.cloud.discoveryengine.v1.DataStoreService.ListDataStores] + method. + parent (str): + Required. The parent branch resource name, such as + ``projects/{project}/locations/{location}/collections/{collection_id}``. + + If the caller does not have permission to list + [DataStore][google.cloud.discoveryengine.v1.DataStore]s + under this location, regardless of whether or not this + data store exists, a PERMISSION_DENIED error is + returned. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.discoveryengine_v1.services.data_store_service.pagers.ListDataStoresPager: + Response message for + [DataStoreService.ListDataStores][google.cloud.discoveryengine.v1.DataStoreService.ListDataStores] + method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_store_service.ListDataStoresRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, data_store_service.ListDataStoresRequest): + request = data_store_service.ListDataStoresRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_data_stores] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListDataStoresPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_data_store( + self, + request: Optional[ + Union[data_store_service.DeleteDataStoreRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a + [DataStore][google.cloud.discoveryengine.v1.DataStore]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_delete_data_store(): + # Create a client + client = discoveryengine_v1.DataStoreServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.DeleteDataStoreRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_data_store(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.DeleteDataStoreRequest, dict]): + The request object. Request message for + [DataStoreService.DeleteDataStore][google.cloud.discoveryengine.v1.DataStoreService.DeleteDataStore] + method. + name (str): + Required. Full resource name of + [DataStore][google.cloud.discoveryengine.v1.DataStore], + such as + ``projects/{project}/locations/{location}/collections/{collection_id}/dataStores/{data_store_id}``. 
+ + If the caller does not have permission to delete the + [DataStore][google.cloud.discoveryengine.v1.DataStore], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the + [DataStore][google.cloud.discoveryengine.v1.DataStore] + to delete does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_store_service.DeleteDataStoreRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, data_store_service.DeleteDataStoreRequest): + request = data_store_service.DeleteDataStoreRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_data_store] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=data_store_service.DeleteDataStoreMetadata, + ) + + # Done; return the response. + return response + + def update_data_store( + self, + request: Optional[ + Union[data_store_service.UpdateDataStoreRequest, dict] + ] = None, + *, + data_store: Optional[gcd_data_store.DataStore] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_data_store.DataStore: + r"""Updates a [DataStore][google.cloud.discoveryengine.v1.DataStore] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_update_data_store(): + # Create a client + client = discoveryengine_v1.DataStoreServiceClient() + + # Initialize request argument(s) + data_store = discoveryengine_v1.DataStore() + data_store.display_name = "display_name_value" + + request = discoveryengine_v1.UpdateDataStoreRequest( + data_store=data_store, + ) + + # Make the request + response = client.update_data_store(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.UpdateDataStoreRequest, dict]): + The request object. Request message for + [DataStoreService.UpdateDataStore][google.cloud.discoveryengine.v1.DataStoreService.UpdateDataStore] + method. + data_store (google.cloud.discoveryengine_v1.types.DataStore): + Required. The + [DataStore][google.cloud.discoveryengine.v1.DataStore] + to update. + + If the caller does not have permission to update the + [DataStore][google.cloud.discoveryengine.v1.DataStore], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the + [DataStore][google.cloud.discoveryengine.v1.DataStore] + to update does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``data_store`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Indicates which fields in the provided + [DataStore][google.cloud.discoveryengine.v1.DataStore] + to update. + + If an unsupported or unknown field is provided, an + INVALID_ARGUMENT error is returned. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.types.DataStore: + DataStore captures global settings + and configs at the DataStore level. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([data_store, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_store_service.UpdateDataStoreRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, data_store_service.UpdateDataStoreRequest): + request = data_store_service.UpdateDataStoreRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if data_store is not None: + request.data_store = data_store + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_data_store] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_store.name", request.data_store.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def __enter__(self) -> "DataStoreServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("DataStoreServiceClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/pagers.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/pagers.py new file mode 100644 index 000000000000..6d8389bec2f1 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.discoveryengine_v1.types import data_store, data_store_service + + +class ListDataStoresPager: + """A pager for iterating through ``list_data_stores`` requests. + + This class thinly wraps an initial + :class:`google.cloud.discoveryengine_v1.types.ListDataStoresResponse` object, and + provides an ``__iter__`` method to iterate through its + ``data_stores`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDataStores`` requests and continue to iterate + through the ``data_stores`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.discoveryengine_v1.types.ListDataStoresResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., data_store_service.ListDataStoresResponse], + request: data_store_service.ListDataStoresRequest, + response: data_store_service.ListDataStoresResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.discoveryengine_v1.types.ListDataStoresRequest): + The initial request object. + response (google.cloud.discoveryengine_v1.types.ListDataStoresResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = data_store_service.ListDataStoresRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[data_store_service.ListDataStoresResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[data_store.DataStore]: + for page in self.pages: + yield from page.data_stores + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDataStoresAsyncPager: + """A pager for iterating through ``list_data_stores`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.discoveryengine_v1.types.ListDataStoresResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``data_stores`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDataStores`` requests and continue to iterate + through the ``data_stores`` field on the + corresponding responses. + + All the usual :class:`google.cloud.discoveryengine_v1.types.ListDataStoresResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[data_store_service.ListDataStoresResponse]], + request: data_store_service.ListDataStoresRequest, + response: data_store_service.ListDataStoresResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.discoveryengine_v1.types.ListDataStoresRequest): + The initial request object. + response (google.cloud.discoveryengine_v1.types.ListDataStoresResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = data_store_service.ListDataStoresRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[data_store_service.ListDataStoresResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[data_store.DataStore]: + async def async_generator(): + async for page in self.pages: + for response in page.data_stores: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/__init__.py new file mode 100644 index 000000000000..6f5f07baebad --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import DataStoreServiceTransport +from .grpc import DataStoreServiceGrpcTransport +from .grpc_asyncio import DataStoreServiceGrpcAsyncIOTransport +from .rest import DataStoreServiceRestInterceptor, DataStoreServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[DataStoreServiceTransport]] +_transport_registry["grpc"] = DataStoreServiceGrpcTransport +_transport_registry["grpc_asyncio"] = DataStoreServiceGrpcAsyncIOTransport +_transport_registry["rest"] = DataStoreServiceRestTransport + +__all__ = ( + "DataStoreServiceTransport", + "DataStoreServiceGrpcTransport", + "DataStoreServiceGrpcAsyncIOTransport", + "DataStoreServiceRestTransport", + "DataStoreServiceRestInterceptor", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/base.py new file mode 100644 index 000000000000..269618740a26 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/base.py @@ -0,0 +1,248 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1 import gapic_version as package_version +from google.cloud.discoveryengine_v1.types import data_store +from google.cloud.discoveryengine_v1.types import data_store as gcd_data_store +from google.cloud.discoveryengine_v1.types import data_store_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class DataStoreServiceTransport(abc.ABC): + """Abstract transport class for DataStoreService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "discoveryengine.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
+ if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_data_store: gapic_v1.method.wrap_method( + self.create_data_store, + default_timeout=None, + client_info=client_info, + ), + self.get_data_store: gapic_v1.method.wrap_method( + self.get_data_store, + default_timeout=None, + client_info=client_info, + ), + self.list_data_stores: gapic_v1.method.wrap_method( + self.list_data_stores, + default_timeout=None, + client_info=client_info, + ), + self.delete_data_store: gapic_v1.method.wrap_method( + self.delete_data_store, + default_timeout=None, + client_info=client_info, + ), + self.update_data_store: gapic_v1.method.wrap_method( + self.update_data_store, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def create_data_store( + self, + ) -> Callable[ + [data_store_service.CreateDataStoreRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_data_store( + self, + ) -> Callable[ + [data_store_service.GetDataStoreRequest], + Union[data_store.DataStore, Awaitable[data_store.DataStore]], + ]: + raise NotImplementedError() + + @property + def list_data_stores( + self, + ) -> Callable[ + [data_store_service.ListDataStoresRequest], + Union[ + data_store_service.ListDataStoresResponse, + Awaitable[data_store_service.ListDataStoresResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_data_store( + self, + ) -> Callable[ + [data_store_service.DeleteDataStoreRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_data_store( + self, + ) -> Callable[ + [data_store_service.UpdateDataStoreRequest], + Union[gcd_data_store.DataStore, Awaitable[gcd_data_store.DataStore]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("DataStoreServiceTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/grpc.py 
b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/grpc.py new file mode 100644 index 000000000000..3772bc48e392 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/grpc.py @@ -0,0 +1,445 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.discoveryengine_v1.types import data_store +from google.cloud.discoveryengine_v1.types import data_store as gcd_data_store +from google.cloud.discoveryengine_v1.types import data_store_service + +from .base import DEFAULT_CLIENT_INFO, DataStoreServiceTransport + + +class DataStoreServiceGrpcTransport(DataStoreServiceTransport): + """gRPC backend transport for DataStoreService. + + Service for managing + [DataStore][google.cloud.discoveryengine.v1.DataStore] + configuration. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. 
The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def create_data_store( + self, + ) -> Callable[ + [data_store_service.CreateDataStoreRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create data store method over gRPC. + + Creates a + [DataStore][google.cloud.discoveryengine.v1.DataStore]. + + DataStore is for storing + [Documents][google.cloud.discoveryengine.v1.Document]. To serve + these documents for Search, or Recommendation use case, an + [Engine][google.cloud.discoveryengine.v1.Engine] needs to be + created separately. + + Returns: + Callable[[~.CreateDataStoreRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_data_store" not in self._stubs: + self._stubs["create_data_store"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.DataStoreService/CreateDataStore", + request_serializer=data_store_service.CreateDataStoreRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_data_store"] + + @property + def get_data_store( + self, + ) -> Callable[[data_store_service.GetDataStoreRequest], data_store.DataStore]: + r"""Return a callable for the get data store method over gRPC. + + Gets a [DataStore][google.cloud.discoveryengine.v1.DataStore]. + + Returns: + Callable[[~.GetDataStoreRequest], + ~.DataStore]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_data_store" not in self._stubs: + self._stubs["get_data_store"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.DataStoreService/GetDataStore", + request_serializer=data_store_service.GetDataStoreRequest.serialize, + response_deserializer=data_store.DataStore.deserialize, + ) + return self._stubs["get_data_store"] + + @property + def list_data_stores( + self, + ) -> Callable[ + [data_store_service.ListDataStoresRequest], + data_store_service.ListDataStoresResponse, + ]: + r"""Return a callable for the list data stores method over gRPC. + + Lists all the + [DataStore][google.cloud.discoveryengine.v1.DataStore]s + associated with the project. + + Returns: + Callable[[~.ListDataStoresRequest], + ~.ListDataStoresResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_data_stores" not in self._stubs: + self._stubs["list_data_stores"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.DataStoreService/ListDataStores", + request_serializer=data_store_service.ListDataStoresRequest.serialize, + response_deserializer=data_store_service.ListDataStoresResponse.deserialize, + ) + return self._stubs["list_data_stores"] + + @property + def delete_data_store( + self, + ) -> Callable[ + [data_store_service.DeleteDataStoreRequest], operations_pb2.Operation + ]: + r"""Return a callable for the delete data store method over gRPC. + + Deletes a + [DataStore][google.cloud.discoveryengine.v1.DataStore]. + + Returns: + Callable[[~.DeleteDataStoreRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_data_store" not in self._stubs: + self._stubs["delete_data_store"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.DataStoreService/DeleteDataStore", + request_serializer=data_store_service.DeleteDataStoreRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_data_store"] + + @property + def update_data_store( + self, + ) -> Callable[ + [data_store_service.UpdateDataStoreRequest], gcd_data_store.DataStore + ]: + r"""Return a callable for the update data store method over gRPC. + + Updates a [DataStore][google.cloud.discoveryengine.v1.DataStore] + + Returns: + Callable[[~.UpdateDataStoreRequest], + ~.DataStore]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_data_store" not in self._stubs: + self._stubs["update_data_store"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.DataStoreService/UpdateDataStore", + request_serializer=data_store_service.UpdateDataStoreRequest.serialize, + response_deserializer=gcd_data_store.DataStore.deserialize, + ) + return self._stubs["update_data_store"] + + def close(self): + self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("DataStoreServiceGrpcTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..c94875b320fa --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/grpc_asyncio.py @@ -0,0 +1,448 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.discoveryengine_v1.types import data_store +from google.cloud.discoveryengine_v1.types import data_store as gcd_data_store +from google.cloud.discoveryengine_v1.types import data_store_service + +from .base import DEFAULT_CLIENT_INFO, DataStoreServiceTransport +from .grpc import DataStoreServiceGrpcTransport + + +class DataStoreServiceGrpcAsyncIOTransport(DataStoreServiceTransport): + """gRPC AsyncIO backend transport for DataStoreService. + + Service for managing + [DataStore][google.cloud.discoveryengine.v1.DataStore] + configuration. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. 
If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. 
+ """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_data_store( + self, + ) -> Callable[ + [data_store_service.CreateDataStoreRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create data store method over gRPC. + + Creates a + [DataStore][google.cloud.discoveryengine.v1.DataStore]. + + DataStore is for storing + [Documents][google.cloud.discoveryengine.v1.Document]. To serve + these documents for Search, or Recommendation use case, an + [Engine][google.cloud.discoveryengine.v1.Engine] needs to be + created separately. + + Returns: + Callable[[~.CreateDataStoreRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_data_store" not in self._stubs: + self._stubs["create_data_store"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.DataStoreService/CreateDataStore", + request_serializer=data_store_service.CreateDataStoreRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_data_store"] + + @property + def get_data_store( + self, + ) -> Callable[ + [data_store_service.GetDataStoreRequest], Awaitable[data_store.DataStore] + ]: + r"""Return a callable for the get data store method over gRPC. + + Gets a [DataStore][google.cloud.discoveryengine.v1.DataStore]. + + Returns: + Callable[[~.GetDataStoreRequest], + Awaitable[~.DataStore]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_data_store" not in self._stubs: + self._stubs["get_data_store"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.DataStoreService/GetDataStore", + request_serializer=data_store_service.GetDataStoreRequest.serialize, + response_deserializer=data_store.DataStore.deserialize, + ) + return self._stubs["get_data_store"] + + @property + def list_data_stores( + self, + ) -> Callable[ + [data_store_service.ListDataStoresRequest], + Awaitable[data_store_service.ListDataStoresResponse], + ]: + r"""Return a callable for the list data stores method over gRPC. + + Lists all the + [DataStore][google.cloud.discoveryengine.v1.DataStore]s + associated with the project. + + Returns: + Callable[[~.ListDataStoresRequest], + Awaitable[~.ListDataStoresResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_data_stores" not in self._stubs: + self._stubs["list_data_stores"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.DataStoreService/ListDataStores", + request_serializer=data_store_service.ListDataStoresRequest.serialize, + response_deserializer=data_store_service.ListDataStoresResponse.deserialize, + ) + return self._stubs["list_data_stores"] + + @property + def delete_data_store( + self, + ) -> Callable[ + [data_store_service.DeleteDataStoreRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete data store method over gRPC. + + Deletes a + [DataStore][google.cloud.discoveryengine.v1.DataStore]. 
+ + Returns: + Callable[[~.DeleteDataStoreRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_data_store" not in self._stubs: + self._stubs["delete_data_store"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.DataStoreService/DeleteDataStore", + request_serializer=data_store_service.DeleteDataStoreRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_data_store"] + + @property + def update_data_store( + self, + ) -> Callable[ + [data_store_service.UpdateDataStoreRequest], Awaitable[gcd_data_store.DataStore] + ]: + r"""Return a callable for the update data store method over gRPC. + + Updates a [DataStore][google.cloud.discoveryengine.v1.DataStore] + + Returns: + Callable[[~.UpdateDataStoreRequest], + Awaitable[~.DataStore]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_data_store" not in self._stubs: + self._stubs["update_data_store"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.DataStoreService/UpdateDataStore", + request_serializer=data_store_service.UpdateDataStoreRequest.serialize, + response_deserializer=gcd_data_store.DataStore.deserialize, + ) + return self._stubs["update_data_store"] + + def close(self): + return self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("DataStoreServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/rest.py new file mode 100644 index 000000000000..cf586b54aa79 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/rest.py @@ -0,0 +1,1336 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.discoveryengine_v1.types import data_store +from google.cloud.discoveryengine_v1.types import data_store as gcd_data_store +from google.cloud.discoveryengine_v1.types import data_store_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import DataStoreServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class DataStoreServiceRestInterceptor: + """Interceptor for DataStoreService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the DataStoreServiceRestTransport. + + .. code-block:: python + class MyCustomDataStoreServiceInterceptor(DataStoreServiceRestInterceptor): + def pre_create_data_store(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_data_store(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_data_store(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_data_store(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_data_store(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_data_store(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_data_stores(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_data_stores(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_data_store(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_data_store(self, response): + logging.log(f"Received response: {response}") + return response + + transport = DataStoreServiceRestTransport(interceptor=MyCustomDataStoreServiceInterceptor()) + client = DataStoreServiceClient(transport=transport) + + + """ + + def pre_create_data_store( + self, + request: data_store_service.CreateDataStoreRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[data_store_service.CreateDataStoreRequest, Sequence[Tuple[str, str]]]: + 
"""Pre-rpc interceptor for create_data_store + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataStoreService server. + """ + return request, metadata + + def post_create_data_store( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_data_store + + Override in a subclass to manipulate the response + after it is returned by the DataStoreService server but before + it is returned to user code. + """ + return response + + def pre_delete_data_store( + self, + request: data_store_service.DeleteDataStoreRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[data_store_service.DeleteDataStoreRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_data_store + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataStoreService server. + """ + return request, metadata + + def post_delete_data_store( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_data_store + + Override in a subclass to manipulate the response + after it is returned by the DataStoreService server but before + it is returned to user code. + """ + return response + + def pre_get_data_store( + self, + request: data_store_service.GetDataStoreRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[data_store_service.GetDataStoreRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_data_store + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataStoreService server. + """ + return request, metadata + + def post_get_data_store( + self, response: data_store.DataStore + ) -> data_store.DataStore: + """Post-rpc interceptor for get_data_store + + Override in a subclass to manipulate the response + after it is returned by the DataStoreService server but before + it is returned to user code. 
+ """ + return response + + def pre_list_data_stores( + self, + request: data_store_service.ListDataStoresRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[data_store_service.ListDataStoresRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_data_stores + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataStoreService server. + """ + return request, metadata + + def post_list_data_stores( + self, response: data_store_service.ListDataStoresResponse + ) -> data_store_service.ListDataStoresResponse: + """Post-rpc interceptor for list_data_stores + + Override in a subclass to manipulate the response + after it is returned by the DataStoreService server but before + it is returned to user code. + """ + return response + + def pre_update_data_store( + self, + request: data_store_service.UpdateDataStoreRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[data_store_service.UpdateDataStoreRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_data_store + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataStoreService server. + """ + return request, metadata + + def post_update_data_store( + self, response: gcd_data_store.DataStore + ) -> gcd_data_store.DataStore: + """Post-rpc interceptor for update_data_store + + Override in a subclass to manipulate the response + after it is returned by the DataStoreService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataStoreService server. 
+ """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the DataStoreService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataStoreService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the DataStoreService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class DataStoreServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: DataStoreServiceRestInterceptor + + +class DataStoreServiceRestTransport(DataStoreServiceTransport): + """REST backend transport for DataStoreService. + + Service for managing + [DataStore][google.cloud.discoveryengine.v1.DataStore] + configuration. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[DataStoreServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or DataStoreServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": 
"/v1/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. 
+ return self._operations_client + + class _CreateDataStore(DataStoreServiceRestStub): + def __hash__(self): + return hash("CreateDataStore") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "dataStoreId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: data_store_service.CreateDataStoreRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create data store method over HTTP. + + Args: + request (~.data_store_service.CreateDataStoreRequest): + The request object. Request for + [DataStoreService.CreateDataStore][google.cloud.discoveryengine.v1.DataStoreService.CreateDataStore] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/dataStores", + "body": "data_store", + }, + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/collections/*}/dataStores", + "body": "data_store", + }, + ] + request, metadata = self._interceptor.pre_create_data_store( + request, metadata + ) + pb_request = data_store_service.CreateDataStoreRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_data_store(resp) + return resp + + class _DeleteDataStore(DataStoreServiceRestStub): + def __hash__(self): + return hash("DeleteDataStore") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: data_store_service.DeleteDataStoreRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete data store method over HTTP. + + Args: + request (~.data_store_service.DeleteDataStoreRequest): + The request object. Request message for + [DataStoreService.DeleteDataStore][google.cloud.discoveryengine.v1.DataStoreService.DeleteDataStore] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*}", + }, + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_data_store( + request, metadata + ) + pb_request = data_store_service.DeleteDataStoreRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_data_store(resp) + return resp + + class _GetDataStore(DataStoreServiceRestStub): + def __hash__(self): + return hash("GetDataStore") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: data_store_service.GetDataStoreRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> data_store.DataStore: + r"""Call the get data store method over HTTP. + + Args: + request (~.data_store_service.GetDataStoreRequest): + The request object. Request message for + [DataStoreService.GetDataStore][google.cloud.discoveryengine.v1.DataStoreService.GetDataStore] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.data_store.DataStore: + DataStore captures global settings + and configs at the DataStore level. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*}", + }, + ] + request, metadata = self._interceptor.pre_get_data_store(request, metadata) + pb_request = data_store_service.GetDataStoreRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = data_store.DataStore() + pb_resp = data_store.DataStore.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_data_store(resp) + return resp + + class _ListDataStores(DataStoreServiceRestStub): + def __hash__(self): + return hash("ListDataStores") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: data_store_service.ListDataStoresRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> data_store_service.ListDataStoresResponse: + r"""Call the list data stores method over HTTP. + + Args: + request (~.data_store_service.ListDataStoresRequest): + The request object. Request message for + [DataStoreService.ListDataStores][google.cloud.discoveryengine.v1.DataStoreService.ListDataStores] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.data_store_service.ListDataStoresResponse: + Response message for + [DataStoreService.ListDataStores][google.cloud.discoveryengine.v1.DataStoreService.ListDataStores] + method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/dataStores", + }, + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/collections/*}/dataStores", + }, + ] + request, metadata = self._interceptor.pre_list_data_stores( + request, metadata + ) + pb_request = data_store_service.ListDataStoresRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = data_store_service.ListDataStoresResponse() + pb_resp = data_store_service.ListDataStoresResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_data_stores(resp) + return resp + + class _UpdateDataStore(DataStoreServiceRestStub): + def __hash__(self): + return hash("UpdateDataStore") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: data_store_service.UpdateDataStoreRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_data_store.DataStore: + r"""Call the update data store method over HTTP. + + Args: + request (~.data_store_service.UpdateDataStoreRequest): + The request object. Request message for + [DataStoreService.UpdateDataStore][google.cloud.discoveryengine.v1.DataStoreService.UpdateDataStore] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcd_data_store.DataStore: + DataStore captures global settings + and configs at the DataStore level. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{data_store.name=projects/*/locations/*/dataStores/*}", + "body": "data_store", + }, + { + "method": "patch", + "uri": "/v1/{data_store.name=projects/*/locations/*/collections/*/dataStores/*}", + "body": "data_store", + }, + ] + request, metadata = self._interceptor.pre_update_data_store( + request, metadata + ) + pb_request = data_store_service.UpdateDataStoreRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcd_data_store.DataStore() + pb_resp = gcd_data_store.DataStore.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_data_store(resp) + return resp + + @property + def create_data_store( + self, + ) -> Callable[ + [data_store_service.CreateDataStoreRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDataStore(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_data_store( + self, + ) -> Callable[ + [data_store_service.DeleteDataStoreRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteDataStore(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_data_store( + self, + ) -> Callable[[data_store_service.GetDataStoreRequest], data_store.DataStore]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDataStore(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_data_stores( + self, + ) -> Callable[ + [data_store_service.ListDataStoresRequest], + data_store_service.ListDataStoresResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListDataStores(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_data_store( + self, + ) -> Callable[ + [data_store_service.UpdateDataStoreRequest], gcd_data_store.DataStore + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateDataStore(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(DataStoreServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify 
the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(DataStoreServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + 
query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("DataStoreServiceRestTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/rest.py index e3818ac384b3..1c06ce80de12 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/rest.py @@ -444,6 +444,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", @@ -502,6 +506,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*}/operations", }, + { + "method": "get", + "uri": 
"/v1/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", @@ -1352,6 +1360,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", @@ -1470,6 +1482,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/__init__.py new file mode 100644 index 000000000000..642a2e63fcb4 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import EngineServiceAsyncClient +from .client import EngineServiceClient + +__all__ = ( + "EngineServiceClient", + "EngineServiceAsyncClient", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/async_client.py new file mode 100644 index 000000000000..9a7efc088084 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/async_client.py @@ -0,0 +1,1053 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.discoveryengine_v1.services.engine_service import pagers +from google.cloud.discoveryengine_v1.types import common +from google.cloud.discoveryengine_v1.types import engine +from google.cloud.discoveryengine_v1.types import engine as gcd_engine +from google.cloud.discoveryengine_v1.types import engine_service + +from .client import EngineServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, EngineServiceTransport +from .transports.grpc_asyncio import EngineServiceGrpcAsyncIOTransport + + +class EngineServiceAsyncClient: + """Service for managing + [Engine][google.cloud.discoveryengine.v1.Engine] configuration. + """ + + _client: EngineServiceClient + + # Copy defaults from the synchronous client for use here. 
+ # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = EngineServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = EngineServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = EngineServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = EngineServiceClient._DEFAULT_UNIVERSE + + collection_path = staticmethod(EngineServiceClient.collection_path) + parse_collection_path = staticmethod(EngineServiceClient.parse_collection_path) + engine_path = staticmethod(EngineServiceClient.engine_path) + parse_engine_path = staticmethod(EngineServiceClient.parse_engine_path) + common_billing_account_path = staticmethod( + EngineServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + EngineServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(EngineServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + EngineServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + EngineServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + EngineServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(EngineServiceClient.common_project_path) + parse_common_project_path = staticmethod( + EngineServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(EngineServiceClient.common_location_path) + parse_common_location_path = staticmethod( + EngineServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + EngineServiceAsyncClient: The constructed client. 
+ """ + return EngineServiceClient.from_service_account_info.__func__(EngineServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + EngineServiceAsyncClient: The constructed client. + """ + return EngineServiceClient.from_service_account_file.__func__(EngineServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return EngineServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> EngineServiceTransport: + """Returns the transport used by the client instance. + + Returns: + EngineServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = functools.partial( + type(EngineServiceClient).get_transport_class, type(EngineServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, EngineServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the engine service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.EngineServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. 
+ client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client = EngineServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_engine( + self, + request: Optional[Union[engine_service.CreateEngineRequest, dict]] = None, + *, + parent: Optional[str] = None, + engine: Optional[gcd_engine.Engine] = None, + engine_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a [Engine][google.cloud.discoveryengine.v1.Engine]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_create_engine(): + # Create a client + client = discoveryengine_v1.EngineServiceAsyncClient() + + # Initialize request argument(s) + engine = discoveryengine_v1.Engine() + engine.display_name = "display_name_value" + engine.solution_type = "SOLUTION_TYPE_CHAT" + + request = discoveryengine_v1.CreateEngineRequest( + parent="parent_value", + engine=engine, + engine_id="engine_id_value", + ) + + # Make the request + operation = client.create_engine(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.CreateEngineRequest, dict]]): + The request object. Request for + [EngineService.CreateEngine][google.cloud.discoveryengine.v1.EngineService.CreateEngine] + method. 
+ parent (:class:`str`): + Required. The parent resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + engine (:class:`google.cloud.discoveryengine_v1.types.Engine`): + Required. The + [Engine][google.cloud.discoveryengine.v1.Engine] to + create. + + This corresponds to the ``engine`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + engine_id (:class:`str`): + Required. The ID to use for the + [Engine][google.cloud.discoveryengine.v1.Engine], which + will become the final component of the + [Engine][google.cloud.discoveryengine.v1.Engine]'s + resource name. + + This field must conform to + `RFC-1034 `__ + standard with a length limit of 63 characters. + Otherwise, an INVALID_ARGUMENT error is returned. + + This corresponds to the ``engine_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1.types.Engine` Metadata that describes the training and serving parameters of an + [Engine][google.cloud.discoveryengine.v1.Engine]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, engine, engine_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = engine_service.CreateEngineRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if engine is not None: + request.engine = engine + if engine_id is not None: + request.engine_id = engine_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_engine, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcd_engine.Engine, + metadata_type=engine_service.CreateEngineMetadata, + ) + + # Done; return the response. + return response + + async def delete_engine( + self, + request: Optional[Union[engine_service.DeleteEngineRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a [Engine][google.cloud.discoveryengine.v1.Engine]. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_delete_engine(): + # Create a client + client = discoveryengine_v1.EngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.DeleteEngineRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_engine(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.DeleteEngineRequest, dict]]): + The request object. Request message for + [EngineService.DeleteEngine][google.cloud.discoveryengine.v1.EngineService.DeleteEngine] + method. + name (:class:`str`): + Required. Full resource name of + [Engine][google.cloud.discoveryengine.v1.Engine], such + as + ``projects/{project}/locations/{location}/collections/{collection_id}/engines/{engine_id}``. + + If the caller does not have permission to delete the + [Engine][google.cloud.discoveryengine.v1.Engine], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the [Engine][google.cloud.discoveryengine.v1.Engine] + to delete does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = engine_service.DeleteEngineRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_engine, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=engine_service.DeleteEngineMetadata, + ) + + # Done; return the response. + return response + + async def update_engine( + self, + request: Optional[Union[engine_service.UpdateEngineRequest, dict]] = None, + *, + engine: Optional[gcd_engine.Engine] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_engine.Engine: + r"""Updates an [Engine][google.cloud.discoveryengine.v1.Engine] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_update_engine(): + # Create a client + client = discoveryengine_v1.EngineServiceAsyncClient() + + # Initialize request argument(s) + engine = discoveryengine_v1.Engine() + engine.display_name = "display_name_value" + engine.solution_type = "SOLUTION_TYPE_CHAT" + + request = discoveryengine_v1.UpdateEngineRequest( + engine=engine, + ) + + # Make the request + response = await client.update_engine(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.UpdateEngineRequest, dict]]): + The request object. Request message for + [EngineService.UpdateEngine][google.cloud.discoveryengine.v1.EngineService.UpdateEngine] + method. + engine (:class:`google.cloud.discoveryengine_v1.types.Engine`): + Required. 
The + [Engine][google.cloud.discoveryengine.v1.Engine] to + update. + + If the caller does not have permission to update the + [Engine][google.cloud.discoveryengine.v1.Engine], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the [Engine][google.cloud.discoveryengine.v1.Engine] + to update does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``engine`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Indicates which fields in the provided + [Engine][google.cloud.discoveryengine.v1.Engine] to + update. + + If an unsupported or unknown field is provided, an + INVALID_ARGUMENT error is returned. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.types.Engine: + Metadata that describes the training and serving parameters of an + [Engine][google.cloud.discoveryengine.v1.Engine]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([engine, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = engine_service.UpdateEngineRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if engine is not None: + request.engine = engine + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_engine, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("engine.name", request.engine.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_engine( + self, + request: Optional[Union[engine_service.GetEngineRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> engine.Engine: + r"""Gets a [Engine][google.cloud.discoveryengine.v1.Engine]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_get_engine(): + # Create a client + client = discoveryengine_v1.EngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.GetEngineRequest( + name="name_value", + ) + + # Make the request + response = await client.get_engine(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.GetEngineRequest, dict]]): + The request object. Request message for + [EngineService.GetEngine][google.cloud.discoveryengine.v1.EngineService.GetEngine] + method. + name (:class:`str`): + Required. Full resource name of + [Engine][google.cloud.discoveryengine.v1.Engine], such + as + ``projects/{project}/locations/{location}/collections/{collection_id}/engines/{engine_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.types.Engine: + Metadata that describes the training and serving parameters of an + [Engine][google.cloud.discoveryengine.v1.Engine]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = engine_service.GetEngineRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_engine, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_engines( + self, + request: Optional[Union[engine_service.ListEnginesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEnginesAsyncPager: + r"""Lists all the [Engine][google.cloud.discoveryengine.v1.Engine]s + associated with the project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_list_engines(): + # Create a client + client = discoveryengine_v1.EngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ListEnginesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_engines(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.ListEnginesRequest, dict]]): + The request object. Request message for + [EngineService.ListEngines][google.cloud.discoveryengine.v1.EngineService.ListEngines] + method. + parent (:class:`str`): + Required. The parent resource name, such as + ``projects/{project}/locations/{location}/collections/{collection_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.services.engine_service.pagers.ListEnginesAsyncPager: + Response message for + [EngineService.ListEngines][google.cloud.discoveryengine.v1.EngineService.ListEngines] + method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = engine_service.ListEnginesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_engines, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListEnginesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "EngineServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("EngineServiceAsyncClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/client.py new file mode 100644 index 000000000000..61c21ec9d9c3 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/client.py @@ -0,0 +1,1495 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.discoveryengine_v1.services.engine_service import pagers +from google.cloud.discoveryengine_v1.types import common 
+from google.cloud.discoveryengine_v1.types import engine +from google.cloud.discoveryengine_v1.types import engine as gcd_engine +from google.cloud.discoveryengine_v1.types import engine_service + +from .transports.base import DEFAULT_CLIENT_INFO, EngineServiceTransport +from .transports.grpc import EngineServiceGrpcTransport +from .transports.grpc_asyncio import EngineServiceGrpcAsyncIOTransport +from .transports.rest import EngineServiceRestTransport + + +class EngineServiceClientMeta(type): + """Metaclass for the EngineService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[EngineServiceTransport]] + _transport_registry["grpc"] = EngineServiceGrpcTransport + _transport_registry["grpc_asyncio"] = EngineServiceGrpcAsyncIOTransport + _transport_registry["rest"] = EngineServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[EngineServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class EngineServiceClient(metaclass=EngineServiceClientMeta): + """Service for managing + [Engine][google.cloud.discoveryengine.v1.Engine] configuration. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. 
+ Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "discoveryengine.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "discoveryengine.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + EngineServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + EngineServiceClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> EngineServiceTransport: + """Returns the transport used by the client instance. + + Returns: + EngineServiceTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def collection_path( + project: str, + location: str, + collection: str, + ) -> str: + """Returns a fully-qualified collection string.""" + return ( + "projects/{project}/locations/{location}/collections/{collection}".format( + project=project, + location=location, + collection=collection, + ) + ) + + @staticmethod + def parse_collection_path(path: str) -> Dict[str, str]: + """Parses a collection path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/collections/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def engine_path( + project: str, + location: str, + collection: str, + engine: str, + ) -> str: + """Returns a fully-qualified engine string.""" + return "projects/{project}/locations/{location}/collections/{collection}/engines/{engine}".format( + project=project, + location=location, + collection=collection, + engine=engine, + ) + + @staticmethod + def parse_engine_path(path: str) -> Dict[str, str]: + """Parses a engine path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/collections/(?P.+?)/engines/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a 
billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None 
+ ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". 
+ + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = EngineServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = EngineServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = EngineServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = EngineServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. 
+ + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = EngineServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or EngineServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, EngineServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the engine service client. 
+ + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, EngineServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = EngineServiceClient._read_environment_variables() + self._client_cert_source = EngineServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = EngineServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, EngineServiceTransport) + if transport_provided: + # transport is a EngineServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." 
+ ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(EngineServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or EngineServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(cast(str, transport)) + self._transport = Transport( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def create_engine( + self, + request: Optional[Union[engine_service.CreateEngineRequest, dict]] = None, + *, + parent: Optional[str] = None, + engine: Optional[gcd_engine.Engine] = None, + engine_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a [Engine][google.cloud.discoveryengine.v1.Engine]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_create_engine(): + # Create a client + client = discoveryengine_v1.EngineServiceClient() + + # Initialize request argument(s) + engine = discoveryengine_v1.Engine() + engine.display_name = "display_name_value" + engine.solution_type = "SOLUTION_TYPE_CHAT" + + request = discoveryengine_v1.CreateEngineRequest( + parent="parent_value", + engine=engine, + engine_id="engine_id_value", + ) + + # Make the request + operation = client.create_engine(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.CreateEngineRequest, dict]): + The request object. Request for + [EngineService.CreateEngine][google.cloud.discoveryengine.v1.EngineService.CreateEngine] + method. + parent (str): + Required. The parent resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + engine (google.cloud.discoveryengine_v1.types.Engine): + Required. The + [Engine][google.cloud.discoveryengine.v1.Engine] to + create. + + This corresponds to the ``engine`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + engine_id (str): + Required. The ID to use for the + [Engine][google.cloud.discoveryengine.v1.Engine], which + will become the final component of the + [Engine][google.cloud.discoveryengine.v1.Engine]'s + resource name. + + This field must conform to + `RFC-1034 `__ + standard with a length limit of 63 characters. + Otherwise, an INVALID_ARGUMENT error is returned. 
+ + This corresponds to the ``engine_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1.types.Engine` Metadata that describes the training and serving parameters of an + [Engine][google.cloud.discoveryengine.v1.Engine]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, engine, engine_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a engine_service.CreateEngineRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, engine_service.CreateEngineRequest): + request = engine_service.CreateEngineRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if engine is not None: + request.engine = engine + if engine_id is not None: + request.engine_id = engine_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_engine] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcd_engine.Engine, + metadata_type=engine_service.CreateEngineMetadata, + ) + + # Done; return the response. + return response + + def delete_engine( + self, + request: Optional[Union[engine_service.DeleteEngineRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a [Engine][google.cloud.discoveryengine.v1.Engine]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_delete_engine(): + # Create a client + client = discoveryengine_v1.EngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.DeleteEngineRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_engine(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.DeleteEngineRequest, dict]): + The request object. 
Request message for + [EngineService.DeleteEngine][google.cloud.discoveryengine.v1.EngineService.DeleteEngine] + method. + name (str): + Required. Full resource name of + [Engine][google.cloud.discoveryengine.v1.Engine], such + as + ``projects/{project}/locations/{location}/collections/{collection_id}/engines/{engine_id}``. + + If the caller does not have permission to delete the + [Engine][google.cloud.discoveryengine.v1.Engine], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the [Engine][google.cloud.discoveryengine.v1.Engine] + to delete does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a engine_service.DeleteEngineRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, engine_service.DeleteEngineRequest): + request = engine_service.DeleteEngineRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_engine] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=engine_service.DeleteEngineMetadata, + ) + + # Done; return the response. + return response + + def update_engine( + self, + request: Optional[Union[engine_service.UpdateEngineRequest, dict]] = None, + *, + engine: Optional[gcd_engine.Engine] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_engine.Engine: + r"""Updates an [Engine][google.cloud.discoveryengine.v1.Engine] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_update_engine(): + # Create a client + client = discoveryengine_v1.EngineServiceClient() + + # Initialize request argument(s) + engine = discoveryengine_v1.Engine() + engine.display_name = "display_name_value" + engine.solution_type = "SOLUTION_TYPE_CHAT" + + request = discoveryengine_v1.UpdateEngineRequest( + engine=engine, + ) + + # Make the request + response = client.update_engine(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.UpdateEngineRequest, dict]): + The request object. Request message for + [EngineService.UpdateEngine][google.cloud.discoveryengine.v1.EngineService.UpdateEngine] + method. + engine (google.cloud.discoveryengine_v1.types.Engine): + Required. The + [Engine][google.cloud.discoveryengine.v1.Engine] to + update. + + If the caller does not have permission to update the + [Engine][google.cloud.discoveryengine.v1.Engine], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the [Engine][google.cloud.discoveryengine.v1.Engine] + to update does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``engine`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Indicates which fields in the provided + [Engine][google.cloud.discoveryengine.v1.Engine] to + update. + + If an unsupported or unknown field is provided, an + INVALID_ARGUMENT error is returned. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.types.Engine: + Metadata that describes the training and serving parameters of an + [Engine][google.cloud.discoveryengine.v1.Engine]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([engine, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a engine_service.UpdateEngineRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, engine_service.UpdateEngineRequest): + request = engine_service.UpdateEngineRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if engine is not None: + request.engine = engine + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_engine] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("engine.name", request.engine.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_engine( + self, + request: Optional[Union[engine_service.GetEngineRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> engine.Engine: + r"""Gets a [Engine][google.cloud.discoveryengine.v1.Engine]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_get_engine(): + # Create a client + client = discoveryengine_v1.EngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.GetEngineRequest( + name="name_value", + ) + + # Make the request + response = client.get_engine(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.GetEngineRequest, dict]): + The request object. Request message for + [EngineService.GetEngine][google.cloud.discoveryengine.v1.EngineService.GetEngine] + method. + name (str): + Required. Full resource name of + [Engine][google.cloud.discoveryengine.v1.Engine], such + as + ``projects/{project}/locations/{location}/collections/{collection_id}/engines/{engine_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.types.Engine: + Metadata that describes the training and serving parameters of an + [Engine][google.cloud.discoveryengine.v1.Engine]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a engine_service.GetEngineRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, engine_service.GetEngineRequest): + request = engine_service.GetEngineRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_engine] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_engines( + self, + request: Optional[Union[engine_service.ListEnginesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEnginesPager: + r"""Lists all the [Engine][google.cloud.discoveryengine.v1.Engine]s + associated with the project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_list_engines(): + # Create a client + client = discoveryengine_v1.EngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ListEnginesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_engines(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.ListEnginesRequest, dict]): + The request object. Request message for + [EngineService.ListEngines][google.cloud.discoveryengine.v1.EngineService.ListEngines] + method. + parent (str): + Required. The parent resource name, such as + ``projects/{project}/locations/{location}/collections/{collection_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.services.engine_service.pagers.ListEnginesPager: + Response message for + [EngineService.ListEngines][google.cloud.discoveryengine.v1.EngineService.ListEngines] + method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a engine_service.ListEnginesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, engine_service.ListEnginesRequest): + request = engine_service.ListEnginesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_engines] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListEnginesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "EngineServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("EngineServiceClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/pagers.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/pagers.py new file mode 100644 index 000000000000..4562185c4b38 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.discoveryengine_v1.types import engine, engine_service + + +class ListEnginesPager: + """A pager for iterating through ``list_engines`` requests. + + This class thinly wraps an initial + :class:`google.cloud.discoveryengine_v1.types.ListEnginesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``engines`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListEngines`` requests and continue to iterate + through the ``engines`` field on the + corresponding responses. 
class ListEnginesAsyncPager:
    """An async pager over ``list_engines`` results.

    Wraps an initial
    :class:`google.cloud.discoveryengine_v1.types.ListEnginesResponse` and
    exposes an ``__aiter__`` method that walks the ``engines`` field,
    transparently issuing follow-up ``ListEngines`` requests whenever the
    current response carries a ``next_page_token``.

    Attribute access falls through to the most recently fetched
    :class:`google.cloud.discoveryengine_v1.types.ListEnginesResponse`, so
    the usual response attributes remain available on the pager itself;
    only the latest response is retained.
    """

    def __init__(
        self,
        method: Callable[..., Awaitable[engine_service.ListEnginesResponse]],
        request: engine_service.ListEnginesRequest,
        response: engine_service.ListEnginesResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager; reused to fetch further pages.
            request (google.cloud.discoveryengine_v1.types.ListEnginesRequest):
                The initial request object.
            response (google.cloud.discoveryengine_v1.types.ListEnginesResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        # Copy the request so our page_token mutations don't leak to the caller.
        self._request = engine_service.ListEnginesRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attribute lookups to the latest response.
        return getattr(self._response, name)

    @property
    async def pages(self) -> AsyncIterator[engine_service.ListEnginesResponse]:
        # Emit the page we already hold, then keep following page tokens.
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = await self._method(self._request, metadata=self._metadata)
            self._response = page
            yield page

    def __aiter__(self) -> AsyncIterator[engine.Engine]:
        async def _flatten():
            async for page in self.pages:
                for item in page.engines:
                    yield item

        return _flatten()

    def __repr__(self) -> str:
        return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
from collections import OrderedDict
from typing import Dict, Type

from .base import EngineServiceTransport
from .grpc import EngineServiceGrpcTransport
from .grpc_asyncio import EngineServiceGrpcAsyncIOTransport
from .rest import EngineServiceRestInterceptor, EngineServiceRestTransport

# Compile a registry of transports.
# Maps a transport name ("grpc", "grpc_asyncio", "rest") to its concrete
# transport class; presumably consumed by the client factory to pick a
# transport implementation by name — confirm against the client module.
_transport_registry = OrderedDict()  # type: Dict[str, Type[EngineServiceTransport]]
_transport_registry["grpc"] = EngineServiceGrpcTransport
_transport_registry["grpc_asyncio"] = EngineServiceGrpcAsyncIOTransport
_transport_registry["rest"] = EngineServiceRestTransport

# Public surface of this subpackage.
__all__ = (
    "EngineServiceTransport",
    "EngineServiceGrpcTransport",
    "EngineServiceGrpcAsyncIOTransport",
    "EngineServiceRestTransport",
    "EngineServiceRestInterceptor",
)
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union

import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1, operations_v1
from google.api_core import retry as retries
import google.auth  # type: ignore
from google.auth import credentials as ga_credentials  # type: ignore
from google.cloud.location import locations_pb2  # type: ignore
from google.longrunning import operations_pb2  # type: ignore
from google.oauth2 import service_account  # type: ignore

from google.cloud.discoveryengine_v1 import gapic_version as package_version
from google.cloud.discoveryengine_v1.types import engine
from google.cloud.discoveryengine_v1.types import engine as gcd_engine
from google.cloud.discoveryengine_v1.types import engine_service

# Default client metadata (carries the package version in the user-agent).
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
    gapic_version=package_version.__version__
)


class EngineServiceTransport(abc.ABC):
    """Abstract transport class for EngineService.

    Handles credential resolution and hostname normalization in
    ``__init__``; concrete subclasses implement the RPC callables.
    """

    # OAuth scopes requested when credentials are resolved from the
    # environment or a credentials file.
    AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)

    DEFAULT_HOST: str = "discoveryengine.googleapis.com"

    def __init__(
        self,
        *,
        host: str = DEFAULT_HOST,
        credentials: Optional[ga_credentials.Credentials] = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        quota_project_id: Optional[str] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
        always_use_jwt_access: Optional[bool] = False,
        api_audience: Optional[str] = None,
        **kwargs,
    ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                The hostname to connect to (default: 'discoveryengine.googleapis.com').
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
            api_audience (Optional[str]): Audience applied to GDC-H credentials
                via ``with_gdch_audience``; falls back to ``host`` when unset.
                NOTE(review): only used for environment-resolved credentials —
                see the ``elif credentials is None`` branch below.
        """

        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}

        # Save the scopes.
        self._scopes = scopes

        # If no credentials are provided, then determine the appropriate
        # defaults.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs(
                "'credentials_file' and 'credentials' are mutually exclusive"
            )

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
            )
        elif credentials is None:
            # Fall back to Application Default Credentials.
            credentials, _ = google.auth.default(
                **scopes_kwargs, quota_project_id=quota_project_id
            )
            # Don't apply audience if the credentials file passed from user.
            if hasattr(credentials, "with_gdch_audience"):
                credentials = credentials.with_gdch_audience(
                    api_audience if api_audience else host
                )

        # If the credentials are service account credentials, then always try to use self signed JWT.
        if (
            always_use_jwt_access
            and isinstance(credentials, service_account.Credentials)
            and hasattr(service_account.Credentials, "with_always_use_jwt_access")
        ):
            credentials = credentials.with_always_use_jwt_access(True)

        # Save the credentials.
        self._credentials = credentials

        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ":" not in host:
            host += ":443"
        self._host = host

    @property
    def host(self):
        """The hostname (with port) this transport connects to."""
        return self._host

    def _prep_wrapped_messages(self, client_info):
        # Precompute the wrapped methods.
        # Wrapping attaches retry/timeout policy and user-agent metadata to
        # each RPC callable exposed by the concrete transport.
        self._wrapped_methods = {
            self.create_engine: gapic_v1.method.wrap_method(
                self.create_engine,
                default_timeout=None,
                client_info=client_info,
            ),
            self.delete_engine: gapic_v1.method.wrap_method(
                self.delete_engine,
                default_timeout=None,
                client_info=client_info,
            ),
            self.update_engine: gapic_v1.method.wrap_method(
                self.update_engine,
                default_timeout=None,
                client_info=client_info,
            ),
            self.get_engine: gapic_v1.method.wrap_method(
                self.get_engine,
                default_timeout=None,
                client_info=client_info,
            ),
            self.list_engines: gapic_v1.method.wrap_method(
                self.list_engines,
                default_timeout=None,
                client_info=client_info,
            ),
        }

    def close(self):
        """Closes resources associated with the transport.

        .. warning::
             Only call this method if the transport is NOT shared
             with other clients - this may cause errors in other clients!
        """
        raise NotImplementedError()

    @property
    def operations_client(self):
        """Return the client designed to process long-running operations."""
        raise NotImplementedError()

    @property
    def create_engine(
        self,
    ) -> Callable[
        [engine_service.CreateEngineRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()

    @property
    def delete_engine(
        self,
    ) -> Callable[
        [engine_service.DeleteEngineRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()

    @property
    def update_engine(
        self,
    ) -> Callable[
        [engine_service.UpdateEngineRequest],
        Union[gcd_engine.Engine, Awaitable[gcd_engine.Engine]],
    ]:
        raise NotImplementedError()

    @property
    def get_engine(
        self,
    ) -> Callable[
        [engine_service.GetEngineRequest],
        Union[engine.Engine, Awaitable[engine.Engine]],
    ]:
        raise NotImplementedError()

    @property
    def list_engines(
        self,
    ) -> Callable[
        [engine_service.ListEnginesRequest],
        Union[
            engine_service.ListEnginesResponse,
            Awaitable[engine_service.ListEnginesResponse],
        ],
    ]:
        raise NotImplementedError()

    @property
    def list_operations(
        self,
    ) -> Callable[
        [operations_pb2.ListOperationsRequest],
        Union[
            operations_pb2.ListOperationsResponse,
            Awaitable[operations_pb2.ListOperationsResponse],
        ],
    ]:
        raise NotImplementedError()

    @property
    def get_operation(
        self,
    ) -> Callable[
        [operations_pb2.GetOperationRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()

    @property
    def kind(self) -> str:
        # Short transport identifier, e.g. "grpc" — supplied by subclasses.
        raise NotImplementedError()


__all__ = ("EngineServiceTransport",)
from typing import Callable, Dict, Optional, Sequence, Tuple, Union
import warnings

from google.api_core import gapic_v1, grpc_helpers, operations_v1
import google.auth  # type: ignore
from google.auth import credentials as ga_credentials  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore
from google.cloud.location import locations_pb2  # type: ignore
from google.longrunning import operations_pb2  # type: ignore
import grpc  # type: ignore

from google.cloud.discoveryengine_v1.types import engine
from google.cloud.discoveryengine_v1.types import engine as gcd_engine
from google.cloud.discoveryengine_v1.types import engine_service

from .base import DEFAULT_CLIENT_INFO, EngineServiceTransport


class EngineServiceGrpcTransport(EngineServiceTransport):
    """gRPC backend transport for EngineService.

    Service for managing
    [Engine][google.cloud.discoveryengine.v1.Engine] configuration.

    This class defines the same methods as the primary client, so the
    primary client can load the underlying transport implementation
    and call it.

    It sends protocol buffers over the wire using gRPC (which is built on
    top of HTTP/2); the ``grpcio`` package must be installed.
    """

    # Cache of lazily-created stub callables, keyed by RPC name.
    _stubs: Dict[str, Callable]

    def __init__(
        self,
        *,
        host: str = "discoveryengine.googleapis.com",
        credentials: Optional[ga_credentials.Credentials] = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        channel: Optional[grpc.Channel] = None,
        api_mtls_endpoint: Optional[str] = None,
        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
        quota_project_id: Optional[str] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
        always_use_jwt_access: Optional[bool] = False,
        api_audience: Optional[str] = None,
    ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                The hostname to connect to (default: 'discoveryengine.googleapis.com').
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
                This argument is ignored if ``channel`` is provided.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is ignored if ``channel`` is provided.
            scopes (Optional(Sequence[str])): A list of scopes. This argument is
                ignored if ``channel`` is provided.
            channel (Optional[grpc.Channel]): A ``Channel`` instance through
                which to make calls.
            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
                If provided, it overrides the ``host`` argument and tries to create
                a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
                Deprecated. A callback to provide client SSL certificate bytes and
                private key bytes, both in PEM format. It is ignored if
                ``api_mtls_endpoint`` is None.
            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
                for the grpc channel. It is ignored if ``channel`` is provided.
            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
                A callback to provide client certificate bytes and private key bytes,
                both in PEM format. It is used to configure a mutual TLS channel. It is
                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
            api_audience (Optional[str]): Audience forwarded to the base
                transport for GDC-H credential configuration.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
                and ``credentials_file`` are passed.
        """
        self._grpc_channel = None
        self._ssl_channel_credentials = ssl_channel_credentials
        self._stubs: Dict[str, Callable] = {}
        self._operations_client: Optional[operations_v1.OperationsClient] = None

        if api_mtls_endpoint:
            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
        if client_cert_source:
            warnings.warn("client_cert_source is deprecated", DeprecationWarning)

        if channel:
            # Ignore credentials if a channel was passed.
            # ``False`` (rather than ``None``) prevents the base class from
            # attempting Application Default Credentials lookup.
            credentials = False
            # If a channel was explicitly provided, set it.
            self._grpc_channel = channel
            self._ssl_channel_credentials = None

        else:
            if api_mtls_endpoint:
                host = api_mtls_endpoint

                # Create SSL credentials with client_cert_source or application
                # default SSL credentials.
                if client_cert_source:
                    cert, key = client_cert_source()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )
                else:
                    self._ssl_channel_credentials = SslCredentials().ssl_credentials

            else:
                if client_cert_source_for_mtls and not ssl_channel_credentials:
                    cert, key = client_cert_source_for_mtls()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )

        # The base transport sets the host, credentials and scopes
        super().__init__(
            host=host,
            credentials=credentials,
            credentials_file=credentials_file,
            scopes=scopes,
            quota_project_id=quota_project_id,
            client_info=client_info,
            always_use_jwt_access=always_use_jwt_access,
            api_audience=api_audience,
        )

        if not self._grpc_channel:
            self._grpc_channel = type(self).create_channel(
                self._host,
                # use the credentials which are saved
                credentials=self._credentials,
                # Set ``credentials_file`` to ``None`` here as
                # the credentials that we saved earlier should be used.
                credentials_file=None,
                scopes=self._scopes,
                ssl_credentials=self._ssl_channel_credentials,
                quota_project_id=quota_project_id,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )

        # Wrap messages. This must be done after self._grpc_channel exists
        self._prep_wrapped_messages(client_info)

    @classmethod
    def create_channel(
        cls,
        host: str = "discoveryengine.googleapis.com",
        credentials: Optional[ga_credentials.Credentials] = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        quota_project_id: Optional[str] = None,
        **kwargs,
    ) -> grpc.Channel:
        """Create and return a gRPC channel object.
        Args:
            host (Optional[str]): The host for the channel to use.
            credentials (Optional[~.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If
                none are specified, the client will attempt to ascertain
                the credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            kwargs (Optional[dict]): Keyword arguments, which are passed to the
                channel creation.
        Returns:
            grpc.Channel: A gRPC channel object.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
                and ``credentials_file`` are passed.
        """

        return grpc_helpers.create_channel(
            host,
            credentials=credentials,
            credentials_file=credentials_file,
            quota_project_id=quota_project_id,
            default_scopes=cls.AUTH_SCOPES,
            scopes=scopes,
            default_host=cls.DEFAULT_HOST,
            **kwargs,
        )

    @property
    def grpc_channel(self) -> grpc.Channel:
        """Return the channel designed to connect to this service."""
        return self._grpc_channel

    @property
    def operations_client(self) -> operations_v1.OperationsClient:
        """Create the client designed to process long-running operations.

        This property caches on the instance; repeated calls return the same
        client.
        """
        # Quick check: Only create a new client if we do not already have one.
        if self._operations_client is None:
            self._operations_client = operations_v1.OperationsClient(self.grpc_channel)

        # Return the client from cache.
        return self._operations_client

    @property
    def create_engine(
        self,
    ) -> Callable[[engine_service.CreateEngineRequest], operations_pb2.Operation]:
        r"""Return a callable for the create engine method over gRPC.

        Creates a [Engine][google.cloud.discoveryengine.v1.Engine].

        Returns:
            Callable[[~.CreateEngineRequest],
                    ~.Operation]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "create_engine" not in self._stubs:
            self._stubs["create_engine"] = self.grpc_channel.unary_unary(
                "/google.cloud.discoveryengine.v1.EngineService/CreateEngine",
                request_serializer=engine_service.CreateEngineRequest.serialize,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs["create_engine"]

    @property
    def delete_engine(
        self,
    ) -> Callable[[engine_service.DeleteEngineRequest], operations_pb2.Operation]:
        r"""Return a callable for the delete engine method over gRPC.

        Deletes a [Engine][google.cloud.discoveryengine.v1.Engine].

        Returns:
            Callable[[~.DeleteEngineRequest],
                    ~.Operation]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "delete_engine" not in self._stubs:
            self._stubs["delete_engine"] = self.grpc_channel.unary_unary(
                "/google.cloud.discoveryengine.v1.EngineService/DeleteEngine",
                request_serializer=engine_service.DeleteEngineRequest.serialize,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs["delete_engine"]

    @property
    def update_engine(
        self,
    ) -> Callable[[engine_service.UpdateEngineRequest], gcd_engine.Engine]:
        r"""Return a callable for the update engine method over gRPC.

        Updates an [Engine][google.cloud.discoveryengine.v1.Engine]

        Returns:
            Callable[[~.UpdateEngineRequest],
                    ~.Engine]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "update_engine" not in self._stubs:
            self._stubs["update_engine"] = self.grpc_channel.unary_unary(
                "/google.cloud.discoveryengine.v1.EngineService/UpdateEngine",
                request_serializer=engine_service.UpdateEngineRequest.serialize,
                response_deserializer=gcd_engine.Engine.deserialize,
            )
        return self._stubs["update_engine"]

    @property
    def get_engine(self) -> Callable[[engine_service.GetEngineRequest], engine.Engine]:
        r"""Return a callable for the get engine method over gRPC.

        Gets a [Engine][google.cloud.discoveryengine.v1.Engine].

        Returns:
            Callable[[~.GetEngineRequest],
                    ~.Engine]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "get_engine" not in self._stubs:
            self._stubs["get_engine"] = self.grpc_channel.unary_unary(
                "/google.cloud.discoveryengine.v1.EngineService/GetEngine",
                request_serializer=engine_service.GetEngineRequest.serialize,
                response_deserializer=engine.Engine.deserialize,
            )
        return self._stubs["get_engine"]

    @property
    def list_engines(
        self,
    ) -> Callable[
        [engine_service.ListEnginesRequest], engine_service.ListEnginesResponse
    ]:
        r"""Return a callable for the list engines method over gRPC.

        Lists all the [Engine][google.cloud.discoveryengine.v1.Engine]s
        associated with the project.

        Returns:
            Callable[[~.ListEnginesRequest],
                    ~.ListEnginesResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "list_engines" not in self._stubs:
            self._stubs["list_engines"] = self.grpc_channel.unary_unary(
                "/google.cloud.discoveryengine.v1.EngineService/ListEngines",
                request_serializer=engine_service.ListEnginesRequest.serialize,
                response_deserializer=engine_service.ListEnginesResponse.deserialize,
            )
        return self._stubs["list_engines"]

    def close(self):
        # Closes the underlying channel; see the base-class warning about
        # sharing a transport across clients.
        self.grpc_channel.close()

    @property
    def get_operation(
        self,
    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
        r"""Return a callable for the get_operation method over gRPC."""
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "get_operation" not in self._stubs:
            self._stubs["get_operation"] = self.grpc_channel.unary_unary(
                "/google.longrunning.Operations/GetOperation",
                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs["get_operation"]

    @property
    def list_operations(
        self,
    ) -> Callable[
        [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse
    ]:
        r"""Return a callable for the list_operations method over gRPC."""
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "list_operations" not in self._stubs:
            self._stubs["list_operations"] = self.grpc_channel.unary_unary(
                "/google.longrunning.Operations/ListOperations",
                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
            )
        return self._stubs["list_operations"]

    @property
    def kind(self) -> str:
        # Identifies this transport implementation by name.
        return "grpc"


__all__ = ("EngineServiceGrpcTransport",)
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.discoveryengine_v1.types import engine +from google.cloud.discoveryengine_v1.types import engine as gcd_engine +from google.cloud.discoveryengine_v1.types import engine_service + +from .base import DEFAULT_CLIENT_INFO, EngineServiceTransport +from .grpc import EngineServiceGrpcTransport + + +class EngineServiceGrpcAsyncIOTransport(EngineServiceTransport): + """gRPC AsyncIO backend transport for EngineService. + + Service for managing + [Engine][google.cloud.discoveryengine.v1.Engine] configuration. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. 
+ """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_engine( + self, + ) -> Callable[ + [engine_service.CreateEngineRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create engine method over gRPC. + + Creates a [Engine][google.cloud.discoveryengine.v1.Engine]. + + Returns: + Callable[[~.CreateEngineRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_engine" not in self._stubs: + self._stubs["create_engine"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.EngineService/CreateEngine", + request_serializer=engine_service.CreateEngineRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_engine"] + + @property + def delete_engine( + self, + ) -> Callable[ + [engine_service.DeleteEngineRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete engine method over gRPC. + + Deletes a [Engine][google.cloud.discoveryengine.v1.Engine]. + + Returns: + Callable[[~.DeleteEngineRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_engine" not in self._stubs: + self._stubs["delete_engine"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.EngineService/DeleteEngine", + request_serializer=engine_service.DeleteEngineRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_engine"] + + @property + def update_engine( + self, + ) -> Callable[[engine_service.UpdateEngineRequest], Awaitable[gcd_engine.Engine]]: + r"""Return a callable for the update engine method over gRPC. + + Updates an [Engine][google.cloud.discoveryengine.v1.Engine] + + Returns: + Callable[[~.UpdateEngineRequest], + Awaitable[~.Engine]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_engine" not in self._stubs: + self._stubs["update_engine"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.EngineService/UpdateEngine", + request_serializer=engine_service.UpdateEngineRequest.serialize, + response_deserializer=gcd_engine.Engine.deserialize, + ) + return self._stubs["update_engine"] + + @property + def get_engine( + self, + ) -> Callable[[engine_service.GetEngineRequest], Awaitable[engine.Engine]]: + r"""Return a callable for the get engine method over gRPC. + + Gets a [Engine][google.cloud.discoveryengine.v1.Engine]. + + Returns: + Callable[[~.GetEngineRequest], + Awaitable[~.Engine]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_engine" not in self._stubs: + self._stubs["get_engine"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.EngineService/GetEngine", + request_serializer=engine_service.GetEngineRequest.serialize, + response_deserializer=engine.Engine.deserialize, + ) + return self._stubs["get_engine"] + + @property + def list_engines( + self, + ) -> Callable[ + [engine_service.ListEnginesRequest], + Awaitable[engine_service.ListEnginesResponse], + ]: + r"""Return a callable for the list engines method over gRPC. + + Lists all the [Engine][google.cloud.discoveryengine.v1.Engine]s + associated with the project. + + Returns: + Callable[[~.ListEnginesRequest], + Awaitable[~.ListEnginesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_engines" not in self._stubs: + self._stubs["list_engines"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.EngineService/ListEngines", + request_serializer=engine_service.ListEnginesRequest.serialize, + response_deserializer=engine_service.ListEnginesResponse.deserialize, + ) + return self._stubs["list_engines"] + + def close(self): + return self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("EngineServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/transports/rest.py new file mode 100644 index 000000000000..ec6ef1e69197 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/transports/rest.py @@ -0,0 +1,1294 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.discoveryengine_v1.types import engine +from google.cloud.discoveryengine_v1.types import engine as gcd_engine +from google.cloud.discoveryengine_v1.types import engine_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import EngineServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class EngineServiceRestInterceptor: + 
"""Interceptor for EngineService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the EngineServiceRestTransport. + + .. code-block:: python + class MyCustomEngineServiceInterceptor(EngineServiceRestInterceptor): + def pre_create_engine(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_engine(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_engine(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_engine(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_engine(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_engine(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_engines(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_engines(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_engine(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_engine(self, response): + logging.log(f"Received response: {response}") + return response + + transport = EngineServiceRestTransport(interceptor=MyCustomEngineServiceInterceptor()) + client = EngineServiceClient(transport=transport) + + + """ + + def pre_create_engine( + self, + request: engine_service.CreateEngineRequest, + metadata: Sequence[Tuple[str, str]], + ) -> 
Tuple[engine_service.CreateEngineRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_engine + + Override in a subclass to manipulate the request or metadata + before they are sent to the EngineService server. + """ + return request, metadata + + def post_create_engine( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_engine + + Override in a subclass to manipulate the response + after it is returned by the EngineService server but before + it is returned to user code. + """ + return response + + def pre_delete_engine( + self, + request: engine_service.DeleteEngineRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[engine_service.DeleteEngineRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_engine + + Override in a subclass to manipulate the request or metadata + before they are sent to the EngineService server. + """ + return request, metadata + + def post_delete_engine( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_engine + + Override in a subclass to manipulate the response + after it is returned by the EngineService server but before + it is returned to user code. + """ + return response + + def pre_get_engine( + self, + request: engine_service.GetEngineRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[engine_service.GetEngineRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_engine + + Override in a subclass to manipulate the request or metadata + before they are sent to the EngineService server. + """ + return request, metadata + + def post_get_engine(self, response: engine.Engine) -> engine.Engine: + """Post-rpc interceptor for get_engine + + Override in a subclass to manipulate the response + after it is returned by the EngineService server but before + it is returned to user code. 
+ """ + return response + + def pre_list_engines( + self, + request: engine_service.ListEnginesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[engine_service.ListEnginesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_engines + + Override in a subclass to manipulate the request or metadata + before they are sent to the EngineService server. + """ + return request, metadata + + def post_list_engines( + self, response: engine_service.ListEnginesResponse + ) -> engine_service.ListEnginesResponse: + """Post-rpc interceptor for list_engines + + Override in a subclass to manipulate the response + after it is returned by the EngineService server but before + it is returned to user code. + """ + return response + + def pre_update_engine( + self, + request: engine_service.UpdateEngineRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[engine_service.UpdateEngineRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_engine + + Override in a subclass to manipulate the request or metadata + before they are sent to the EngineService server. + """ + return request, metadata + + def post_update_engine(self, response: gcd_engine.Engine) -> gcd_engine.Engine: + """Post-rpc interceptor for update_engine + + Override in a subclass to manipulate the response + after it is returned by the EngineService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the EngineService server. 
+ """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the EngineService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the EngineService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the EngineService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class EngineServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: EngineServiceRestInterceptor + + +class EngineServiceRestTransport(EngineServiceTransport): + """REST backend transport for EngineService. + + Service for managing + [Engine][google.cloud.discoveryengine.v1.Engine] configuration. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[EngineServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or EngineServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": 
"/v1/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. 
+ return self._operations_client + + class _CreateEngine(EngineServiceRestStub): + def __hash__(self): + return hash("CreateEngine") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "engineId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: engine_service.CreateEngineRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create engine method over HTTP. + + Args: + request (~.engine_service.CreateEngineRequest): + The request object. Request for + [EngineService.CreateEngine][google.cloud.discoveryengine.v1.EngineService.CreateEngine] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/collections/*}/engines", + "body": "engine", + }, + ] + request, metadata = self._interceptor.pre_create_engine(request, metadata) + pb_request = engine_service.CreateEngineRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_engine(resp) + return resp + + class _DeleteEngine(EngineServiceRestStub): + def __hash__(self): + return hash("DeleteEngine") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: engine_service.DeleteEngineRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete engine method over HTTP. + + Args: + request (~.engine_service.DeleteEngineRequest): + The request object. Request message for + [EngineService.DeleteEngine][google.cloud.discoveryengine.v1.EngineService.DeleteEngine] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_engine(request, metadata) + pb_request = engine_service.DeleteEngineRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_engine(resp) + return resp + + class _GetEngine(EngineServiceRestStub): + def __hash__(self): + return hash("GetEngine") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: engine_service.GetEngineRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> engine.Engine: + r"""Call the get engine method over HTTP. + + Args: + request (~.engine_service.GetEngineRequest): + The request object. Request message for + [EngineService.GetEngine][google.cloud.discoveryengine.v1.EngineService.GetEngine] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.engine.Engine: + Metadata that describes the training and serving + parameters of an + [Engine][google.cloud.discoveryengine.v1.Engine]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*}", + }, + ] + request, metadata = self._interceptor.pre_get_engine(request, metadata) + pb_request = engine_service.GetEngineRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = engine.Engine() + pb_resp = engine.Engine.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_engine(resp) + return resp + + class _ListEngines(EngineServiceRestStub): + def __hash__(self): + return hash("ListEngines") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: engine_service.ListEnginesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> engine_service.ListEnginesResponse: + r"""Call the list engines method over HTTP. + + Args: + request (~.engine_service.ListEnginesRequest): + The request object. Request message for + [EngineService.ListEngines][google.cloud.discoveryengine.v1.EngineService.ListEngines] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.engine_service.ListEnginesResponse: + Response message for + [EngineService.ListEngines][google.cloud.discoveryengine.v1.EngineService.ListEngines] + method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/collections/*}/engines", + }, + ] + request, metadata = self._interceptor.pre_list_engines(request, metadata) + pb_request = engine_service.ListEnginesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = engine_service.ListEnginesResponse() + pb_resp = engine_service.ListEnginesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_engines(resp) + return resp + + class _UpdateEngine(EngineServiceRestStub): + def __hash__(self): + return hash("UpdateEngine") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: engine_service.UpdateEngineRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_engine.Engine: + r"""Call the update engine method over HTTP. + + Args: + request (~.engine_service.UpdateEngineRequest): + The request object. Request message for + [EngineService.UpdateEngine][google.cloud.discoveryengine.v1.EngineService.UpdateEngine] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcd_engine.Engine: + Metadata that describes the training and serving + parameters of an + [Engine][google.cloud.discoveryengine.v1.Engine]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{engine.name=projects/*/locations/*/collections/*/engines/*}", + "body": "engine", + }, + ] + request, metadata = self._interceptor.pre_update_engine(request, metadata) + pb_request = engine_service.UpdateEngineRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcd_engine.Engine() + pb_resp = gcd_engine.Engine.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_engine(resp) + return resp + + @property + def create_engine( + self, + ) -> Callable[[engine_service.CreateEngineRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateEngine(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_engine( + self, + ) -> Callable[[engine_service.DeleteEngineRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteEngine(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_engine(self) -> Callable[[engine_service.GetEngineRequest], engine.Engine]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetEngine(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_engines( + self, + ) -> Callable[ + [engine_service.ListEnginesRequest], engine_service.ListEnginesResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListEngines(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_engine( + self, + ) -> Callable[[engine_service.UpdateEngineRequest], gcd_engine.Engine]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateEngine(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(EngineServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify 
the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(EngineServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + 
query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("EngineServiceRestTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/rest.py index 3af92401db53..879612394723 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/rest.py @@ -391,6 +391,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", @@ -449,6 +453,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*}/operations", }, + { + "method": "get", + "uri": 
"/v1/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", @@ -1083,6 +1091,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", @@ -1201,6 +1213,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/rest.py index c3349a73b1f7..427ffd0353c0 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/rest.py @@ -298,6 +298,11 @@ def __call__( "uri": "/v1/{serving_config=projects/*/locations/*/collections/*/dataStores/*/servingConfigs/*}:search", "body": "*", }, + { + "method": "post", + "uri": "/v1/{serving_config=projects/*/locations/*/collections/*/engines/*/servingConfigs/*}:search", + "body": "*", + }, ] request, metadata = self._interceptor.pre_search(request, metadata) pb_request = search_service.SearchRequest.pb(request) @@ -390,6 +395,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/operations/*}", }, + { + "method": "get", + "uri": 
"/v1/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", @@ -508,6 +517,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/__init__.py new file mode 100644 index 000000000000..e99e4a4e4ff5 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import SiteSearchEngineServiceAsyncClient +from .client import SiteSearchEngineServiceClient + +__all__ = ( + "SiteSearchEngineServiceClient", + "SiteSearchEngineServiceAsyncClient", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/async_client.py new file mode 100644 index 000000000000..cdf203c15ecb --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/async_client.py @@ -0,0 +1,1819 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.discoveryengine_v1.services.site_search_engine_service import pagers +from google.cloud.discoveryengine_v1.types import ( + site_search_engine, + site_search_engine_service, +) + +from .client import SiteSearchEngineServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, SiteSearchEngineServiceTransport +from .transports.grpc_asyncio import SiteSearchEngineServiceGrpcAsyncIOTransport + + +class SiteSearchEngineServiceAsyncClient: + """Service for managing site search related resources.""" + + _client: SiteSearchEngineServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = SiteSearchEngineServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = SiteSearchEngineServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ( + SiteSearchEngineServiceClient._DEFAULT_ENDPOINT_TEMPLATE + ) + _DEFAULT_UNIVERSE = SiteSearchEngineServiceClient._DEFAULT_UNIVERSE + + site_search_engine_path = staticmethod( + SiteSearchEngineServiceClient.site_search_engine_path + ) + parse_site_search_engine_path = staticmethod( + SiteSearchEngineServiceClient.parse_site_search_engine_path + ) + target_site_path = staticmethod(SiteSearchEngineServiceClient.target_site_path) + parse_target_site_path = staticmethod( + SiteSearchEngineServiceClient.parse_target_site_path + ) + common_billing_account_path = staticmethod( + SiteSearchEngineServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + SiteSearchEngineServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(SiteSearchEngineServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + SiteSearchEngineServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + SiteSearchEngineServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + SiteSearchEngineServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + SiteSearchEngineServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + SiteSearchEngineServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + SiteSearchEngineServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + SiteSearchEngineServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. 
+ args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SiteSearchEngineServiceAsyncClient: The constructed client. + """ + return SiteSearchEngineServiceClient.from_service_account_info.__func__(SiteSearchEngineServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SiteSearchEngineServiceAsyncClient: The constructed client. + """ + return SiteSearchEngineServiceClient.from_service_account_file.__func__(SiteSearchEngineServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. 
+ + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return SiteSearchEngineServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> SiteSearchEngineServiceTransport: + """Returns the transport used by the client instance. + + Returns: + SiteSearchEngineServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = functools.partial( + type(SiteSearchEngineServiceClient).get_transport_class, + type(SiteSearchEngineServiceClient), + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, SiteSearchEngineServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the site search engine service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.SiteSearchEngineServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = SiteSearchEngineServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_site_search_engine( + self, + request: Optional[ + Union[site_search_engine_service.GetSiteSearchEngineRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> site_search_engine.SiteSearchEngine: + r"""Gets the + [SiteSearchEngine][google.cloud.discoveryengine.v1.SiteSearchEngine]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_get_site_search_engine(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.GetSiteSearchEngineRequest( + name="name_value", + ) + + # Make the request + response = await client.get_site_search_engine(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.GetSiteSearchEngineRequest, dict]]): + The request object. Request message for + [SiteSearchEngineService.GetSiteSearchEngine][google.cloud.discoveryengine.v1.SiteSearchEngineService.GetSiteSearchEngine] + method. + name (:class:`str`): + Required. 
Resource name of + [SiteSearchEngine][google.cloud.discoveryengine.v1.SiteSearchEngine], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine``. + + If the caller does not have permission to access the + [SiteSearchEngine], regardless of whether or not it + exists, a PERMISSION_DENIED error is returned. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.types.SiteSearchEngine: + SiteSearchEngine captures DataStore + level site search persisting + configurations. It is a singleton value + per data store. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = site_search_engine_service.GetSiteSearchEngineRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_site_search_engine, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_target_site( + self, + request: Optional[ + Union[site_search_engine_service.CreateTargetSiteRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + target_site: Optional[site_search_engine.TargetSite] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a + [TargetSite][google.cloud.discoveryengine.v1.TargetSite]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_create_target_site(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + target_site = discoveryengine_v1.TargetSite() + target_site.provided_uri_pattern = "provided_uri_pattern_value" + + request = discoveryengine_v1.CreateTargetSiteRequest( + parent="parent_value", + target_site=target_site, + ) + + # Make the request + operation = client.create_target_site(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.CreateTargetSiteRequest, dict]]): + The request object. Request message for + [SiteSearchEngineService.CreateTargetSite][google.cloud.discoveryengine.v1.SiteSearchEngineService.CreateTargetSite] + method. + parent (:class:`str`): + Required. Parent resource name of + [TargetSite][google.cloud.discoveryengine.v1.TargetSite], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_site (:class:`google.cloud.discoveryengine_v1.types.TargetSite`): + Required. The + [TargetSite][google.cloud.discoveryengine.v1.TargetSite] + to create. + + This corresponds to the ``target_site`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.discoveryengine_v1.types.TargetSite` + A target site for the SiteSearchEngine. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, target_site]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = site_search_engine_service.CreateTargetSiteRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if target_site is not None: + request.target_site = target_site + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_target_site, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + site_search_engine.TargetSite, + metadata_type=site_search_engine_service.CreateTargetSiteMetadata, + ) + + # Done; return the response. 
+ return response + + async def batch_create_target_sites( + self, + request: Optional[ + Union[site_search_engine_service.BatchCreateTargetSitesRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates [TargetSite][google.cloud.discoveryengine.v1.TargetSite] + in a batch. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_batch_create_target_sites(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + requests = discoveryengine_v1.CreateTargetSiteRequest() + requests.parent = "parent_value" + requests.target_site.provided_uri_pattern = "provided_uri_pattern_value" + + request = discoveryengine_v1.BatchCreateTargetSitesRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + operation = client.batch_create_target_sites(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.BatchCreateTargetSitesRequest, dict]]): + The request object. Request message for + [SiteSearchEngineService.BatchCreateTargetSites][google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchCreateTargetSites] + method. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1.types.BatchCreateTargetSitesResponse` Response message for + [SiteSearchEngineService.BatchCreateTargetSites][google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchCreateTargetSites] + method. + + """ + # Create or coerce a protobuf request object. + request = site_search_engine_service.BatchCreateTargetSitesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.batch_create_target_sites, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + site_search_engine_service.BatchCreateTargetSitesResponse, + metadata_type=site_search_engine_service.BatchCreateTargetSiteMetadata, + ) + + # Done; return the response. 
+ return response + + async def get_target_site( + self, + request: Optional[ + Union[site_search_engine_service.GetTargetSiteRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> site_search_engine.TargetSite: + r"""Gets a [TargetSite][google.cloud.discoveryengine.v1.TargetSite]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_get_target_site(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.GetTargetSiteRequest( + name="name_value", + ) + + # Make the request + response = await client.get_target_site(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.GetTargetSiteRequest, dict]]): + The request object. Request message for + [SiteSearchEngineService.GetTargetSite][google.cloud.discoveryengine.v1.SiteSearchEngineService.GetTargetSite] + method. + name (:class:`str`): + Required. Full resource name of + [TargetSite][google.cloud.discoveryengine.v1.TargetSite], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine/targetSites/{target_site}``. 
+ + If the caller does not have permission to access the + [TargetSite][google.cloud.discoveryengine.v1.TargetSite], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the requested + [TargetSite][google.cloud.discoveryengine.v1.TargetSite] + does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.types.TargetSite: + A target site for the + SiteSearchEngine. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = site_search_engine_service.GetTargetSiteRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_target_site, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_target_site( + self, + request: Optional[ + Union[site_search_engine_service.UpdateTargetSiteRequest, dict] + ] = None, + *, + target_site: Optional[site_search_engine.TargetSite] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a + [TargetSite][google.cloud.discoveryengine.v1.TargetSite]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_update_target_site(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + target_site = discoveryengine_v1.TargetSite() + target_site.provided_uri_pattern = "provided_uri_pattern_value" + + request = discoveryengine_v1.UpdateTargetSiteRequest( + target_site=target_site, + ) + + # Make the request + operation = client.update_target_site(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.UpdateTargetSiteRequest, dict]]): + The request object. Request message for + [SiteSearchEngineService.UpdateTargetSite][google.cloud.discoveryengine.v1.SiteSearchEngineService.UpdateTargetSite] + method. 
+ target_site (:class:`google.cloud.discoveryengine_v1.types.TargetSite`): + Required. The target site to update. If the caller does + not have permission to update the + [TargetSite][google.cloud.discoveryengine.v1.TargetSite], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the + [TargetSite][google.cloud.discoveryengine.v1.TargetSite] + to update does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``target_site`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.discoveryengine_v1.types.TargetSite` + A target site for the SiteSearchEngine. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([target_site]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = site_search_engine_service.UpdateTargetSiteRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if target_site is not None: + request.target_site = target_site + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_target_site, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("target_site.name", request.target_site.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + site_search_engine.TargetSite, + metadata_type=site_search_engine_service.UpdateTargetSiteMetadata, + ) + + # Done; return the response. + return response + + async def delete_target_site( + self, + request: Optional[ + Union[site_search_engine_service.DeleteTargetSiteRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a + [TargetSite][google.cloud.discoveryengine.v1.TargetSite]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_delete_target_site(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.DeleteTargetSiteRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_target_site(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.DeleteTargetSiteRequest, dict]]): + The request object. Request message for + [SiteSearchEngineService.DeleteTargetSite][google.cloud.discoveryengine.v1.SiteSearchEngineService.DeleteTargetSite] + method. + name (:class:`str`): + Required. Full resource name of + [TargetSite][google.cloud.discoveryengine.v1.TargetSite], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine/targetSites/{target_site}``. + + If the caller does not have permission to access the + [TargetSite][google.cloud.discoveryengine.v1.TargetSite], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the requested + [TargetSite][google.cloud.discoveryengine.v1.TargetSite] + does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = site_search_engine_service.DeleteTargetSiteRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_target_site, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=site_search_engine_service.DeleteTargetSiteMetadata, + ) + + # Done; return the response. + return response + + async def list_target_sites( + self, + request: Optional[ + Union[site_search_engine_service.ListTargetSitesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTargetSitesAsyncPager: + r"""Gets a list of + [TargetSite][google.cloud.discoveryengine.v1.TargetSite]s. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_list_target_sites(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ListTargetSitesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_target_sites(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.ListTargetSitesRequest, dict]]): + The request object. Request message for + [SiteSearchEngineService.ListTargetSites][google.cloud.discoveryengine.v1.SiteSearchEngineService.ListTargetSites] + method. + parent (:class:`str`): + Required. 
The parent site search engine resource name, + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine``. + + If the caller does not have permission to list + [TargetSite][google.cloud.discoveryengine.v1.TargetSite]s + under this site search engine, regardless of whether or + not this branch exists, a PERMISSION_DENIED error is + returned. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.services.site_search_engine_service.pagers.ListTargetSitesAsyncPager: + Response message for + [SiteSearchEngineService.ListTargetSites][google.cloud.discoveryengine.v1.SiteSearchEngineService.ListTargetSites] + method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = site_search_engine_service.ListTargetSitesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_target_sites, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTargetSitesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def enable_advanced_site_search( + self, + request: Optional[ + Union[site_search_engine_service.EnableAdvancedSiteSearchRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Upgrade from basic site search to advanced site + search. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_enable_advanced_site_search(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.EnableAdvancedSiteSearchRequest( + site_search_engine="site_search_engine_value", + ) + + # Make the request + operation = client.enable_advanced_site_search(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.EnableAdvancedSiteSearchRequest, dict]]): + The request object. Request message for + [SiteSearchEngineService.EnableAdvancedSiteSearch][google.cloud.discoveryengine.v1.SiteSearchEngineService.EnableAdvancedSiteSearch] + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1.types.EnableAdvancedSiteSearchResponse` Response message for + [SiteSearchEngineService.EnableAdvancedSiteSearch][google.cloud.discoveryengine.v1.SiteSearchEngineService.EnableAdvancedSiteSearch] + method. + + """ + # Create or coerce a protobuf request object. + request = site_search_engine_service.EnableAdvancedSiteSearchRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.enable_advanced_site_search, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("site_search_engine", request.site_search_engine),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + site_search_engine_service.EnableAdvancedSiteSearchResponse, + metadata_type=site_search_engine_service.EnableAdvancedSiteSearchMetadata, + ) + + # Done; return the response. + return response + + async def disable_advanced_site_search( + self, + request: Optional[ + Union[site_search_engine_service.DisableAdvancedSiteSearchRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Downgrade from advanced site search to basic site + search. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_disable_advanced_site_search(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.DisableAdvancedSiteSearchRequest( + site_search_engine="site_search_engine_value", + ) + + # Make the request + operation = client.disable_advanced_site_search(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.DisableAdvancedSiteSearchRequest, dict]]): + The request object. Request message for + [SiteSearchEngineService.DisableAdvancedSiteSearch][google.cloud.discoveryengine.v1.SiteSearchEngineService.DisableAdvancedSiteSearch] + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1.types.DisableAdvancedSiteSearchResponse` Response message for + [SiteSearchEngineService.DisableAdvancedSiteSearch][google.cloud.discoveryengine.v1.SiteSearchEngineService.DisableAdvancedSiteSearch] + method. + + """ + # Create or coerce a protobuf request object. + request = site_search_engine_service.DisableAdvancedSiteSearchRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.disable_advanced_site_search, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("site_search_engine", request.site_search_engine),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + site_search_engine_service.DisableAdvancedSiteSearchResponse, + metadata_type=site_search_engine_service.DisableAdvancedSiteSearchMetadata, + ) + + # Done; return the response. + return response + + async def recrawl_uris( + self, + request: Optional[ + Union[site_search_engine_service.RecrawlUrisRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Request on-demand recrawl for a list of URIs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_recrawl_uris(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.RecrawlUrisRequest( + site_search_engine="site_search_engine_value", + uris=['uris_value1', 'uris_value2'], + ) + + # Make the request + operation = client.recrawl_uris(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.RecrawlUrisRequest, dict]]): + The request object. Request message for + [SiteSearchEngineService.RecrawlUris][google.cloud.discoveryengine.v1.SiteSearchEngineService.RecrawlUris] + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1.types.RecrawlUrisResponse` Response message for + [SiteSearchEngineService.RecrawlUris][google.cloud.discoveryengine.v1.SiteSearchEngineService.RecrawlUris] + method. + + """ + # Create or coerce a protobuf request object. + request = site_search_engine_service.RecrawlUrisRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.recrawl_uris, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("site_search_engine", request.site_search_engine),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + site_search_engine_service.RecrawlUrisResponse, + metadata_type=site_search_engine_service.RecrawlUrisMetadata, + ) + + # Done; return the response. + return response + + async def batch_verify_target_sites( + self, + request: Optional[ + Union[site_search_engine_service.BatchVerifyTargetSitesRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Verify target sites' ownership and validity. + This API sends all the target sites under site search + engine for verification. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_batch_verify_target_sites(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.BatchVerifyTargetSitesRequest( + parent="parent_value", + ) + + # Make the request + operation = client.batch_verify_target_sites(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.BatchVerifyTargetSitesRequest, dict]]): + The request object. Request message for + [SiteSearchEngineService.BatchVerifyTargetSites][google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchVerifyTargetSites] + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1.types.BatchVerifyTargetSitesResponse` Response message for + [SiteSearchEngineService.BatchVerifyTargetSites][google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchVerifyTargetSites] + method. + + """ + # Create or coerce a protobuf request object. + request = site_search_engine_service.BatchVerifyTargetSitesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.batch_verify_target_sites, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + site_search_engine_service.BatchVerifyTargetSitesResponse, + metadata_type=site_search_engine_service.BatchVerifyTargetSitesMetadata, + ) + + # Done; return the response. + return response + + async def fetch_domain_verification_status( + self, + request: Optional[ + Union[site_search_engine_service.FetchDomainVerificationStatusRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.FetchDomainVerificationStatusAsyncPager: + r"""Returns list of target sites with its domain verification + status. This method can only be called under data store with + BASIC_SITE_SEARCH state at the moment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_fetch_domain_verification_status(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.FetchDomainVerificationStatusRequest( + site_search_engine="site_search_engine_value", + ) + + # Make the request + page_result = client.fetch_domain_verification_status(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.FetchDomainVerificationStatusRequest, dict]]): + The request object. Request message for + [SiteSearchEngineService.FetchDomainVerificationStatus][google.cloud.discoveryengine.v1.SiteSearchEngineService.FetchDomainVerificationStatus] + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.services.site_search_engine_service.pagers.FetchDomainVerificationStatusAsyncPager: + Response message for + [SiteSearchEngineService.FetchDomainVerificationStatus][google.cloud.discoveryengine.v1.SiteSearchEngineService.FetchDomainVerificationStatus] + method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + request = site_search_engine_service.FetchDomainVerificationStatusRequest( + request + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.fetch_domain_verification_status, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("site_search_engine", request.site_search_engine),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.FetchDomainVerificationStatusAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "SiteSearchEngineServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("SiteSearchEngineServiceAsyncClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/client.py new file mode 100644 index 000000000000..826d4552fb4a --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/client.py @@ -0,0 +1,2284 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.discoveryengine_v1.services.site_search_engine_service import pagers +from google.cloud.discoveryengine_v1.types import ( + site_search_engine, + site_search_engine_service, +) + +from .transports.base import DEFAULT_CLIENT_INFO, SiteSearchEngineServiceTransport +from .transports.grpc import SiteSearchEngineServiceGrpcTransport +from .transports.grpc_asyncio import SiteSearchEngineServiceGrpcAsyncIOTransport +from .transports.rest import SiteSearchEngineServiceRestTransport + + +class SiteSearchEngineServiceClientMeta(type): + """Metaclass for the SiteSearchEngineService client. 
+ + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[SiteSearchEngineServiceTransport]] + _transport_registry["grpc"] = SiteSearchEngineServiceGrpcTransport + _transport_registry["grpc_asyncio"] = SiteSearchEngineServiceGrpcAsyncIOTransport + _transport_registry["rest"] = SiteSearchEngineServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[SiteSearchEngineServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class SiteSearchEngineServiceClient(metaclass=SiteSearchEngineServiceClientMeta): + """Service for managing site search related resources.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "discoveryengine.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "discoveryengine.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SiteSearchEngineServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SiteSearchEngineServiceClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> SiteSearchEngineServiceTransport: + """Returns the transport used by the client instance. + + Returns: + SiteSearchEngineServiceTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def site_search_engine_path( + project: str, + location: str, + data_store: str, + ) -> str: + """Returns a fully-qualified site_search_engine string.""" + return "projects/{project}/locations/{location}/dataStores/{data_store}/siteSearchEngine".format( + project=project, + location=location, + data_store=data_store, + ) + + @staticmethod + def parse_site_search_engine_path(path: str) -> Dict[str, str]: + """Parses a site_search_engine path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/dataStores/(?P.+?)/siteSearchEngine$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def target_site_path( + project: str, + location: str, + data_store: str, + target_site: str, + ) -> str: + """Returns a fully-qualified target_site string.""" + return "projects/{project}/locations/{location}/dataStores/{data_store}/siteSearchEngine/targetSites/{target_site}".format( + project=project, + location=location, + data_store=data_store, + target_site=target_site, + ) + + @staticmethod + def parse_target_site_path(path: str) -> Dict[str, str]: + """Parses a target_site path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/dataStores/(?P.+?)/siteSearchEngine/targetSites/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return 
"billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = 
re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". 
+ + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = SiteSearchEngineServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = SiteSearchEngineServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ( + SiteSearchEngineServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = SiteSearchEngineServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. 
+ + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = SiteSearchEngineServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or SiteSearchEngineServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, SiteSearchEngineServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the site search engine service client. 
+ + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, SiteSearchEngineServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = SiteSearchEngineServiceClient._read_environment_variables() + self._client_cert_source = ( + SiteSearchEngineServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + ) + self._universe_domain = SiteSearchEngineServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, SiteSearchEngineServiceTransport) + if transport_provided: + # transport is a SiteSearchEngineServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." 
+ ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(SiteSearchEngineServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or SiteSearchEngineServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(cast(str, transport)) + self._transport = Transport( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def get_site_search_engine( + self, + request: Optional[ + Union[site_search_engine_service.GetSiteSearchEngineRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> site_search_engine.SiteSearchEngine: + r"""Gets the + [SiteSearchEngine][google.cloud.discoveryengine.v1.SiteSearchEngine]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_get_site_search_engine(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.GetSiteSearchEngineRequest( + name="name_value", + ) + + # Make the request + response = client.get_site_search_engine(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.GetSiteSearchEngineRequest, dict]): + The request object. Request message for + [SiteSearchEngineService.GetSiteSearchEngine][google.cloud.discoveryengine.v1.SiteSearchEngineService.GetSiteSearchEngine] + method. + name (str): + Required. Resource name of + [SiteSearchEngine][google.cloud.discoveryengine.v1.SiteSearchEngine], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine``. + + If the caller does not have permission to access the + [SiteSearchEngine], regardless of whether or not it + exists, a PERMISSION_DENIED error is returned. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.types.SiteSearchEngine: + SiteSearchEngine captures DataStore + level site search persisting + configurations. It is a singleton value + per data store. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a site_search_engine_service.GetSiteSearchEngineRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, site_search_engine_service.GetSiteSearchEngineRequest + ): + request = site_search_engine_service.GetSiteSearchEngineRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_site_search_engine] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_target_site( + self, + request: Optional[ + Union[site_search_engine_service.CreateTargetSiteRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + target_site: Optional[site_search_engine.TargetSite] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a + [TargetSite][google.cloud.discoveryengine.v1.TargetSite]. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_create_target_site(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + target_site = discoveryengine_v1.TargetSite() + target_site.provided_uri_pattern = "provided_uri_pattern_value" + + request = discoveryengine_v1.CreateTargetSiteRequest( + parent="parent_value", + target_site=target_site, + ) + + # Make the request + operation = client.create_target_site(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.CreateTargetSiteRequest, dict]): + The request object. Request message for + [SiteSearchEngineService.CreateTargetSite][google.cloud.discoveryengine.v1.SiteSearchEngineService.CreateTargetSite] + method. + parent (str): + Required. Parent resource name of + [TargetSite][google.cloud.discoveryengine.v1.TargetSite], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_site (google.cloud.discoveryengine_v1.types.TargetSite): + Required. The + [TargetSite][google.cloud.discoveryengine.v1.TargetSite] + to create. + + This corresponds to the ``target_site`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.discoveryengine_v1.types.TargetSite` + A target site for the SiteSearchEngine. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, target_site]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a site_search_engine_service.CreateTargetSiteRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, site_search_engine_service.CreateTargetSiteRequest): + request = site_search_engine_service.CreateTargetSiteRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if target_site is not None: + request.target_site = target_site + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_target_site] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + site_search_engine.TargetSite, + metadata_type=site_search_engine_service.CreateTargetSiteMetadata, + ) + + # Done; return the response. + return response + + def batch_create_target_sites( + self, + request: Optional[ + Union[site_search_engine_service.BatchCreateTargetSitesRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates [TargetSite][google.cloud.discoveryengine.v1.TargetSite] + in a batch. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_batch_create_target_sites(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + requests = discoveryengine_v1.CreateTargetSiteRequest() + requests.parent = "parent_value" + requests.target_site.provided_uri_pattern = "provided_uri_pattern_value" + + request = discoveryengine_v1.BatchCreateTargetSitesRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + operation = client.batch_create_target_sites(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.BatchCreateTargetSitesRequest, dict]): + The request object. Request message for + [SiteSearchEngineService.BatchCreateTargetSites][google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchCreateTargetSites] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1.types.BatchCreateTargetSitesResponse` Response message for + [SiteSearchEngineService.BatchCreateTargetSites][google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchCreateTargetSites] + method. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a site_search_engine_service.BatchCreateTargetSitesRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, site_search_engine_service.BatchCreateTargetSitesRequest + ): + request = site_search_engine_service.BatchCreateTargetSitesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.batch_create_target_sites + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + site_search_engine_service.BatchCreateTargetSitesResponse, + metadata_type=site_search_engine_service.BatchCreateTargetSiteMetadata, + ) + + # Done; return the response. + return response + + def get_target_site( + self, + request: Optional[ + Union[site_search_engine_service.GetTargetSiteRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> site_search_engine.TargetSite: + r"""Gets a [TargetSite][google.cloud.discoveryengine.v1.TargetSite]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_get_target_site(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.GetTargetSiteRequest( + name="name_value", + ) + + # Make the request + response = client.get_target_site(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.GetTargetSiteRequest, dict]): + The request object. Request message for + [SiteSearchEngineService.GetTargetSite][google.cloud.discoveryengine.v1.SiteSearchEngineService.GetTargetSite] + method. + name (str): + Required. Full resource name of + [TargetSite][google.cloud.discoveryengine.v1.TargetSite], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine/targetSites/{target_site}``. + + If the caller does not have permission to access the + [TargetSite][google.cloud.discoveryengine.v1.TargetSite], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the requested + [TargetSite][google.cloud.discoveryengine.v1.TargetSite] + does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.types.TargetSite: + A target site for the + SiteSearchEngine. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a site_search_engine_service.GetTargetSiteRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, site_search_engine_service.GetTargetSiteRequest): + request = site_search_engine_service.GetTargetSiteRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_target_site] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_target_site( + self, + request: Optional[ + Union[site_search_engine_service.UpdateTargetSiteRequest, dict] + ] = None, + *, + target_site: Optional[site_search_engine.TargetSite] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates a + [TargetSite][google.cloud.discoveryengine.v1.TargetSite]. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_update_target_site(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + target_site = discoveryengine_v1.TargetSite() + target_site.provided_uri_pattern = "provided_uri_pattern_value" + + request = discoveryengine_v1.UpdateTargetSiteRequest( + target_site=target_site, + ) + + # Make the request + operation = client.update_target_site(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.UpdateTargetSiteRequest, dict]): + The request object. Request message for + [SiteSearchEngineService.UpdateTargetSite][google.cloud.discoveryengine.v1.SiteSearchEngineService.UpdateTargetSite] + method. + target_site (google.cloud.discoveryengine_v1.types.TargetSite): + Required. The target site to update. If the caller does + not have permission to update the + [TargetSite][google.cloud.discoveryengine.v1.TargetSite], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the + [TargetSite][google.cloud.discoveryengine.v1.TargetSite] + to update does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``target_site`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.discoveryengine_v1.types.TargetSite` + A target site for the SiteSearchEngine. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([target_site]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a site_search_engine_service.UpdateTargetSiteRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, site_search_engine_service.UpdateTargetSiteRequest): + request = site_search_engine_service.UpdateTargetSiteRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if target_site is not None: + request.target_site = target_site + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_target_site] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("target_site.name", request.target_site.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + site_search_engine.TargetSite, + metadata_type=site_search_engine_service.UpdateTargetSiteMetadata, + ) + + # Done; return the response. + return response + + def delete_target_site( + self, + request: Optional[ + Union[site_search_engine_service.DeleteTargetSiteRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a + [TargetSite][google.cloud.discoveryengine.v1.TargetSite]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_delete_target_site(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.DeleteTargetSiteRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_target_site(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.DeleteTargetSiteRequest, dict]): + The request object. Request message for + [SiteSearchEngineService.DeleteTargetSite][google.cloud.discoveryengine.v1.SiteSearchEngineService.DeleteTargetSite] + method. + name (str): + Required. 
Full resource name of + [TargetSite][google.cloud.discoveryengine.v1.TargetSite], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine/targetSites/{target_site}``. + + If the caller does not have permission to access the + [TargetSite][google.cloud.discoveryengine.v1.TargetSite], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the requested + [TargetSite][google.cloud.discoveryengine.v1.TargetSite] + does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a site_search_engine_service.DeleteTargetSiteRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, site_search_engine_service.DeleteTargetSiteRequest): + request = site_search_engine_service.DeleteTargetSiteRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_target_site] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=site_search_engine_service.DeleteTargetSiteMetadata, + ) + + # Done; return the response. + return response + + def list_target_sites( + self, + request: Optional[ + Union[site_search_engine_service.ListTargetSitesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTargetSitesPager: + r"""Gets a list of + [TargetSite][google.cloud.discoveryengine.v1.TargetSite]s. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_list_target_sites(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ListTargetSitesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_target_sites(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.ListTargetSitesRequest, dict]): + The request object. Request message for + [SiteSearchEngineService.ListTargetSites][google.cloud.discoveryengine.v1.SiteSearchEngineService.ListTargetSites] + method. + parent (str): + Required. The parent site search engine resource name, + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine``. + + If the caller does not have permission to list + [TargetSite][google.cloud.discoveryengine.v1.TargetSite]s + under this site search engine, regardless of whether or + not this branch exists, a PERMISSION_DENIED error is + returned. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.services.site_search_engine_service.pagers.ListTargetSitesPager: + Response message for + [SiteSearchEngineService.ListTargetSites][google.cloud.discoveryengine.v1.SiteSearchEngineService.ListTargetSites] + method. 
+ + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a site_search_engine_service.ListTargetSitesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, site_search_engine_service.ListTargetSitesRequest): + request = site_search_engine_service.ListTargetSitesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_target_sites] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTargetSitesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def enable_advanced_site_search( + self, + request: Optional[ + Union[site_search_engine_service.EnableAdvancedSiteSearchRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Upgrade from basic site search to advanced site + search. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_enable_advanced_site_search(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.EnableAdvancedSiteSearchRequest( + site_search_engine="site_search_engine_value", + ) + + # Make the request + operation = client.enable_advanced_site_search(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.EnableAdvancedSiteSearchRequest, dict]): + The request object. Request message for + [SiteSearchEngineService.EnableAdvancedSiteSearch][google.cloud.discoveryengine.v1.SiteSearchEngineService.EnableAdvancedSiteSearch] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1.types.EnableAdvancedSiteSearchResponse` Response message for + [SiteSearchEngineService.EnableAdvancedSiteSearch][google.cloud.discoveryengine.v1.SiteSearchEngineService.EnableAdvancedSiteSearch] + method. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a site_search_engine_service.EnableAdvancedSiteSearchRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, site_search_engine_service.EnableAdvancedSiteSearchRequest + ): + request = site_search_engine_service.EnableAdvancedSiteSearchRequest( + request + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.enable_advanced_site_search + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("site_search_engine", request.site_search_engine),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + site_search_engine_service.EnableAdvancedSiteSearchResponse, + metadata_type=site_search_engine_service.EnableAdvancedSiteSearchMetadata, + ) + + # Done; return the response. 
+ return response + + def disable_advanced_site_search( + self, + request: Optional[ + Union[site_search_engine_service.DisableAdvancedSiteSearchRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Downgrade from advanced site search to basic site + search. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_disable_advanced_site_search(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.DisableAdvancedSiteSearchRequest( + site_search_engine="site_search_engine_value", + ) + + # Make the request + operation = client.disable_advanced_site_search(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.DisableAdvancedSiteSearchRequest, dict]): + The request object. Request message for + [SiteSearchEngineService.DisableAdvancedSiteSearch][google.cloud.discoveryengine.v1.SiteSearchEngineService.DisableAdvancedSiteSearch] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1.types.DisableAdvancedSiteSearchResponse` Response message for + [SiteSearchEngineService.DisableAdvancedSiteSearch][google.cloud.discoveryengine.v1.SiteSearchEngineService.DisableAdvancedSiteSearch] + method. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a site_search_engine_service.DisableAdvancedSiteSearchRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, site_search_engine_service.DisableAdvancedSiteSearchRequest + ): + request = site_search_engine_service.DisableAdvancedSiteSearchRequest( + request + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.disable_advanced_site_search + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("site_search_engine", request.site_search_engine),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + site_search_engine_service.DisableAdvancedSiteSearchResponse, + metadata_type=site_search_engine_service.DisableAdvancedSiteSearchMetadata, + ) + + # Done; return the response. 
+ return response + + def recrawl_uris( + self, + request: Optional[ + Union[site_search_engine_service.RecrawlUrisRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Request on-demand recrawl for a list of URIs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_recrawl_uris(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.RecrawlUrisRequest( + site_search_engine="site_search_engine_value", + uris=['uris_value1', 'uris_value2'], + ) + + # Make the request + operation = client.recrawl_uris(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.RecrawlUrisRequest, dict]): + The request object. Request message for + [SiteSearchEngineService.RecrawlUris][google.cloud.discoveryengine.v1.SiteSearchEngineService.RecrawlUris] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.discoveryengine_v1.types.RecrawlUrisResponse` Response message for + [SiteSearchEngineService.RecrawlUris][google.cloud.discoveryengine.v1.SiteSearchEngineService.RecrawlUris] + method. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a site_search_engine_service.RecrawlUrisRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, site_search_engine_service.RecrawlUrisRequest): + request = site_search_engine_service.RecrawlUrisRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.recrawl_uris] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("site_search_engine", request.site_search_engine),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + site_search_engine_service.RecrawlUrisResponse, + metadata_type=site_search_engine_service.RecrawlUrisMetadata, + ) + + # Done; return the response. + return response + + def batch_verify_target_sites( + self, + request: Optional[ + Union[site_search_engine_service.BatchVerifyTargetSitesRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Verify target sites' ownership and validity. 
+ This API sends all the target sites under site search + engine for verification. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_batch_verify_target_sites(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.BatchVerifyTargetSitesRequest( + parent="parent_value", + ) + + # Make the request + operation = client.batch_verify_target_sites(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.BatchVerifyTargetSitesRequest, dict]): + The request object. Request message for + [SiteSearchEngineService.BatchVerifyTargetSites][google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchVerifyTargetSites] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1.types.BatchVerifyTargetSitesResponse` Response message for + [SiteSearchEngineService.BatchVerifyTargetSites][google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchVerifyTargetSites] + method. 
+ + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a site_search_engine_service.BatchVerifyTargetSitesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, site_search_engine_service.BatchVerifyTargetSitesRequest + ): + request = site_search_engine_service.BatchVerifyTargetSitesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.batch_verify_target_sites + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + site_search_engine_service.BatchVerifyTargetSitesResponse, + metadata_type=site_search_engine_service.BatchVerifyTargetSitesMetadata, + ) + + # Done; return the response. + return response + + def fetch_domain_verification_status( + self, + request: Optional[ + Union[site_search_engine_service.FetchDomainVerificationStatusRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.FetchDomainVerificationStatusPager: + r"""Returns list of target sites with its domain verification + status. This method can only be called under data store with + BASIC_SITE_SEARCH state at the moment. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_fetch_domain_verification_status(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.FetchDomainVerificationStatusRequest( + site_search_engine="site_search_engine_value", + ) + + # Make the request + page_result = client.fetch_domain_verification_status(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.FetchDomainVerificationStatusRequest, dict]): + The request object. Request message for + [SiteSearchEngineService.FetchDomainVerificationStatus][google.cloud.discoveryengine.v1.SiteSearchEngineService.FetchDomainVerificationStatus] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.services.site_search_engine_service.pagers.FetchDomainVerificationStatusPager: + Response message for + [SiteSearchEngineService.FetchDomainVerificationStatus][google.cloud.discoveryengine.v1.SiteSearchEngineService.FetchDomainVerificationStatus] + method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Minor optimization to avoid making a copy if the user passes + # in a site_search_engine_service.FetchDomainVerificationStatusRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, site_search_engine_service.FetchDomainVerificationStatusRequest + ): + request = site_search_engine_service.FetchDomainVerificationStatusRequest( + request + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.fetch_domain_verification_status + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("site_search_engine", request.site_search_engine),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.FetchDomainVerificationStatusPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "SiteSearchEngineServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("SiteSearchEngineServiceClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/pagers.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/pagers.py new file mode 100644 index 000000000000..02677c25008c --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/pagers.py @@ -0,0 +1,305 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.discoveryengine_v1.types import ( + site_search_engine, + site_search_engine_service, +) + + +class ListTargetSitesPager: + """A pager for iterating through ``list_target_sites`` requests. + + This class thinly wraps an initial + :class:`google.cloud.discoveryengine_v1.types.ListTargetSitesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``target_sites`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTargetSites`` requests and continue to iterate + through the ``target_sites`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.discoveryengine_v1.types.ListTargetSitesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., site_search_engine_service.ListTargetSitesResponse], + request: site_search_engine_service.ListTargetSitesRequest, + response: site_search_engine_service.ListTargetSitesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.discoveryengine_v1.types.ListTargetSitesRequest): + The initial request object. + response (google.cloud.discoveryengine_v1.types.ListTargetSitesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = site_search_engine_service.ListTargetSitesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[site_search_engine_service.ListTargetSitesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[site_search_engine.TargetSite]: + for page in self.pages: + yield from page.target_sites + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTargetSitesAsyncPager: + """A pager for iterating through ``list_target_sites`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.discoveryengine_v1.types.ListTargetSitesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``target_sites`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTargetSites`` requests and continue to iterate + through the ``target_sites`` field on the + corresponding responses. + + All the usual :class:`google.cloud.discoveryengine_v1.types.ListTargetSitesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[site_search_engine_service.ListTargetSitesResponse] + ], + request: site_search_engine_service.ListTargetSitesRequest, + response: site_search_engine_service.ListTargetSitesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.discoveryengine_v1.types.ListTargetSitesRequest): + The initial request object. + response (google.cloud.discoveryengine_v1.types.ListTargetSitesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = site_search_engine_service.ListTargetSitesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[site_search_engine_service.ListTargetSitesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[site_search_engine.TargetSite]: + async def async_generator(): + async for page in self.pages: + for response in page.target_sites: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class FetchDomainVerificationStatusPager: + """A pager for iterating through ``fetch_domain_verification_status`` requests. + + This class thinly wraps an initial + :class:`google.cloud.discoveryengine_v1.types.FetchDomainVerificationStatusResponse` object, and + provides an ``__iter__`` method to iterate through its + ``target_sites`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``FetchDomainVerificationStatus`` requests and continue to iterate + through the ``target_sites`` field on the + corresponding responses. + + All the usual :class:`google.cloud.discoveryengine_v1.types.FetchDomainVerificationStatusResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[ + ..., site_search_engine_service.FetchDomainVerificationStatusResponse + ], + request: site_search_engine_service.FetchDomainVerificationStatusRequest, + response: site_search_engine_service.FetchDomainVerificationStatusResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.discoveryengine_v1.types.FetchDomainVerificationStatusRequest): + The initial request object. + response (google.cloud.discoveryengine_v1.types.FetchDomainVerificationStatusResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = site_search_engine_service.FetchDomainVerificationStatusRequest( + request + ) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages( + self, + ) -> Iterator[site_search_engine_service.FetchDomainVerificationStatusResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[site_search_engine.TargetSite]: + for page in self.pages: + yield from page.target_sites + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class FetchDomainVerificationStatusAsyncPager: + """A pager for iterating through ``fetch_domain_verification_status`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.discoveryengine_v1.types.FetchDomainVerificationStatusResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``target_sites`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``FetchDomainVerificationStatus`` requests and continue to iterate + through the ``target_sites`` field on the + corresponding responses. + + All the usual :class:`google.cloud.discoveryengine_v1.types.FetchDomainVerificationStatusResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., + Awaitable[site_search_engine_service.FetchDomainVerificationStatusResponse], + ], + request: site_search_engine_service.FetchDomainVerificationStatusRequest, + response: site_search_engine_service.FetchDomainVerificationStatusResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.discoveryengine_v1.types.FetchDomainVerificationStatusRequest): + The initial request object. + response (google.cloud.discoveryengine_v1.types.FetchDomainVerificationStatusResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = site_search_engine_service.FetchDomainVerificationStatusRequest( + request + ) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[ + site_search_engine_service.FetchDomainVerificationStatusResponse + ]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[site_search_engine.TargetSite]: + async def async_generator(): + async for page in self.pages: + for response in page.target_sites: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/__init__.py new file mode 100644 index 000000000000..398fe08609e9 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import SiteSearchEngineServiceTransport +from .grpc import SiteSearchEngineServiceGrpcTransport +from .grpc_asyncio import SiteSearchEngineServiceGrpcAsyncIOTransport +from .rest import ( + SiteSearchEngineServiceRestInterceptor, + SiteSearchEngineServiceRestTransport, +) + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[SiteSearchEngineServiceTransport]] +_transport_registry["grpc"] = SiteSearchEngineServiceGrpcTransport +_transport_registry["grpc_asyncio"] = SiteSearchEngineServiceGrpcAsyncIOTransport +_transport_registry["rest"] = SiteSearchEngineServiceRestTransport + +__all__ = ( + "SiteSearchEngineServiceTransport", + "SiteSearchEngineServiceGrpcTransport", + "SiteSearchEngineServiceGrpcAsyncIOTransport", + "SiteSearchEngineServiceRestTransport", + "SiteSearchEngineServiceRestInterceptor", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/base.py new file mode 100644 index 000000000000..90fe326b23e6 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/base.py @@ -0,0 +1,353 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1 import gapic_version as package_version +from google.cloud.discoveryengine_v1.types import ( + site_search_engine, + site_search_engine_service, +) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class SiteSearchEngineServiceTransport(abc.ABC): + """Abstract transport class for SiteSearchEngineService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "discoveryengine.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. 
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get_site_search_engine: gapic_v1.method.wrap_method( + self.get_site_search_engine, + default_timeout=None, + client_info=client_info, + ), + self.create_target_site: gapic_v1.method.wrap_method( + self.create_target_site, + default_timeout=None, + client_info=client_info, + ), + self.batch_create_target_sites: gapic_v1.method.wrap_method( + self.batch_create_target_sites, + default_timeout=None, + client_info=client_info, + ), + self.get_target_site: gapic_v1.method.wrap_method( + self.get_target_site, + default_timeout=None, + client_info=client_info, + ), + self.update_target_site: gapic_v1.method.wrap_method( + self.update_target_site, + default_timeout=None, + client_info=client_info, + ), + self.delete_target_site: gapic_v1.method.wrap_method( + self.delete_target_site, + default_timeout=None, + client_info=client_info, + ), + self.list_target_sites: gapic_v1.method.wrap_method( + self.list_target_sites, + default_timeout=None, + client_info=client_info, + ), + self.enable_advanced_site_search: gapic_v1.method.wrap_method( + self.enable_advanced_site_search, + default_timeout=None, + client_info=client_info, + ), + 
self.disable_advanced_site_search: gapic_v1.method.wrap_method( + self.disable_advanced_site_search, + default_timeout=None, + client_info=client_info, + ), + self.recrawl_uris: gapic_v1.method.wrap_method( + self.recrawl_uris, + default_timeout=None, + client_info=client_info, + ), + self.batch_verify_target_sites: gapic_v1.method.wrap_method( + self.batch_verify_target_sites, + default_timeout=None, + client_info=client_info, + ), + self.fetch_domain_verification_status: gapic_v1.method.wrap_method( + self.fetch_domain_verification_status, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def get_site_search_engine( + self, + ) -> Callable[ + [site_search_engine_service.GetSiteSearchEngineRequest], + Union[ + site_search_engine.SiteSearchEngine, + Awaitable[site_search_engine.SiteSearchEngine], + ], + ]: + raise NotImplementedError() + + @property + def create_target_site( + self, + ) -> Callable[ + [site_search_engine_service.CreateTargetSiteRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def batch_create_target_sites( + self, + ) -> Callable[ + [site_search_engine_service.BatchCreateTargetSitesRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_target_site( + self, + ) -> Callable[ + [site_search_engine_service.GetTargetSiteRequest], + Union[site_search_engine.TargetSite, Awaitable[site_search_engine.TargetSite]], + ]: + raise NotImplementedError() + + @property + def 
update_target_site( + self, + ) -> Callable[ + [site_search_engine_service.UpdateTargetSiteRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_target_site( + self, + ) -> Callable[ + [site_search_engine_service.DeleteTargetSiteRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_target_sites( + self, + ) -> Callable[ + [site_search_engine_service.ListTargetSitesRequest], + Union[ + site_search_engine_service.ListTargetSitesResponse, + Awaitable[site_search_engine_service.ListTargetSitesResponse], + ], + ]: + raise NotImplementedError() + + @property + def enable_advanced_site_search( + self, + ) -> Callable[ + [site_search_engine_service.EnableAdvancedSiteSearchRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def disable_advanced_site_search( + self, + ) -> Callable[ + [site_search_engine_service.DisableAdvancedSiteSearchRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def recrawl_uris( + self, + ) -> Callable[ + [site_search_engine_service.RecrawlUrisRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def batch_verify_target_sites( + self, + ) -> Callable[ + [site_search_engine_service.BatchVerifyTargetSitesRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def fetch_domain_verification_status( + self, + ) -> Callable[ + [site_search_engine_service.FetchDomainVerificationStatusRequest], + Union[ + site_search_engine_service.FetchDomainVerificationStatusResponse, + Awaitable[site_search_engine_service.FetchDomainVerificationStatusResponse], + ], + ]: + raise 
NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("SiteSearchEngineServiceTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/grpc.py new file mode 100644 index 000000000000..02a518cf60ae --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/grpc.py @@ -0,0 +1,653 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.discoveryengine_v1.types import ( + site_search_engine, + site_search_engine_service, +) + +from .base import DEFAULT_CLIENT_INFO, SiteSearchEngineServiceTransport + + +class SiteSearchEngineServiceGrpcTransport(SiteSearchEngineServiceTransport): + """gRPC backend transport for SiteSearchEngineService. + + Service for managing site search related resources. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. 
If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. 
+ return self._operations_client + + @property + def get_site_search_engine( + self, + ) -> Callable[ + [site_search_engine_service.GetSiteSearchEngineRequest], + site_search_engine.SiteSearchEngine, + ]: + r"""Return a callable for the get site search engine method over gRPC. + + Gets the + [SiteSearchEngine][google.cloud.discoveryengine.v1.SiteSearchEngine]. + + Returns: + Callable[[~.GetSiteSearchEngineRequest], + ~.SiteSearchEngine]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_site_search_engine" not in self._stubs: + self._stubs["get_site_search_engine"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SiteSearchEngineService/GetSiteSearchEngine", + request_serializer=site_search_engine_service.GetSiteSearchEngineRequest.serialize, + response_deserializer=site_search_engine.SiteSearchEngine.deserialize, + ) + return self._stubs["get_site_search_engine"] + + @property + def create_target_site( + self, + ) -> Callable[ + [site_search_engine_service.CreateTargetSiteRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create target site method over gRPC. + + Creates a + [TargetSite][google.cloud.discoveryengine.v1.TargetSite]. + + Returns: + Callable[[~.CreateTargetSiteRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_target_site" not in self._stubs: + self._stubs["create_target_site"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SiteSearchEngineService/CreateTargetSite", + request_serializer=site_search_engine_service.CreateTargetSiteRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_target_site"] + + @property + def batch_create_target_sites( + self, + ) -> Callable[ + [site_search_engine_service.BatchCreateTargetSitesRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the batch create target sites method over gRPC. + + Creates [TargetSite][google.cloud.discoveryengine.v1.TargetSite] + in a batch. + + Returns: + Callable[[~.BatchCreateTargetSitesRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_create_target_sites" not in self._stubs: + self._stubs["batch_create_target_sites"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SiteSearchEngineService/BatchCreateTargetSites", + request_serializer=site_search_engine_service.BatchCreateTargetSitesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["batch_create_target_sites"] + + @property + def get_target_site( + self, + ) -> Callable[ + [site_search_engine_service.GetTargetSiteRequest], site_search_engine.TargetSite + ]: + r"""Return a callable for the get target site method over gRPC. + + Gets a [TargetSite][google.cloud.discoveryengine.v1.TargetSite]. + + Returns: + Callable[[~.GetTargetSiteRequest], + ~.TargetSite]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_target_site" not in self._stubs: + self._stubs["get_target_site"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SiteSearchEngineService/GetTargetSite", + request_serializer=site_search_engine_service.GetTargetSiteRequest.serialize, + response_deserializer=site_search_engine.TargetSite.deserialize, + ) + return self._stubs["get_target_site"] + + @property + def update_target_site( + self, + ) -> Callable[ + [site_search_engine_service.UpdateTargetSiteRequest], operations_pb2.Operation + ]: + r"""Return a callable for the update target site method over gRPC. + + Updates a + [TargetSite][google.cloud.discoveryengine.v1.TargetSite]. + + Returns: + Callable[[~.UpdateTargetSiteRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_target_site" not in self._stubs: + self._stubs["update_target_site"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SiteSearchEngineService/UpdateTargetSite", + request_serializer=site_search_engine_service.UpdateTargetSiteRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_target_site"] + + @property + def delete_target_site( + self, + ) -> Callable[ + [site_search_engine_service.DeleteTargetSiteRequest], operations_pb2.Operation + ]: + r"""Return a callable for the delete target site method over gRPC. + + Deletes a + [TargetSite][google.cloud.discoveryengine.v1.TargetSite]. + + Returns: + Callable[[~.DeleteTargetSiteRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_target_site" not in self._stubs: + self._stubs["delete_target_site"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SiteSearchEngineService/DeleteTargetSite", + request_serializer=site_search_engine_service.DeleteTargetSiteRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_target_site"] + + @property + def list_target_sites( + self, + ) -> Callable[ + [site_search_engine_service.ListTargetSitesRequest], + site_search_engine_service.ListTargetSitesResponse, + ]: + r"""Return a callable for the list target sites method over gRPC. + + Gets a list of + [TargetSite][google.cloud.discoveryengine.v1.TargetSite]s. + + Returns: + Callable[[~.ListTargetSitesRequest], + ~.ListTargetSitesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_target_sites" not in self._stubs: + self._stubs["list_target_sites"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SiteSearchEngineService/ListTargetSites", + request_serializer=site_search_engine_service.ListTargetSitesRequest.serialize, + response_deserializer=site_search_engine_service.ListTargetSitesResponse.deserialize, + ) + return self._stubs["list_target_sites"] + + @property + def enable_advanced_site_search( + self, + ) -> Callable[ + [site_search_engine_service.EnableAdvancedSiteSearchRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the enable advanced site search method over gRPC. + + Upgrade from basic site search to advanced site + search. 
+ + Returns: + Callable[[~.EnableAdvancedSiteSearchRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "enable_advanced_site_search" not in self._stubs: + self._stubs["enable_advanced_site_search"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SiteSearchEngineService/EnableAdvancedSiteSearch", + request_serializer=site_search_engine_service.EnableAdvancedSiteSearchRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["enable_advanced_site_search"] + + @property + def disable_advanced_site_search( + self, + ) -> Callable[ + [site_search_engine_service.DisableAdvancedSiteSearchRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the disable advanced site search method over gRPC. + + Downgrade from advanced site search to basic site + search. + + Returns: + Callable[[~.DisableAdvancedSiteSearchRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "disable_advanced_site_search" not in self._stubs: + self._stubs["disable_advanced_site_search"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SiteSearchEngineService/DisableAdvancedSiteSearch", + request_serializer=site_search_engine_service.DisableAdvancedSiteSearchRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["disable_advanced_site_search"] + + @property + def recrawl_uris( + self, + ) -> Callable[ + [site_search_engine_service.RecrawlUrisRequest], operations_pb2.Operation + ]: + r"""Return a callable for the recrawl uris method over gRPC. + + Request on-demand recrawl for a list of URIs. + + Returns: + Callable[[~.RecrawlUrisRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "recrawl_uris" not in self._stubs: + self._stubs["recrawl_uris"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SiteSearchEngineService/RecrawlUris", + request_serializer=site_search_engine_service.RecrawlUrisRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["recrawl_uris"] + + @property + def batch_verify_target_sites( + self, + ) -> Callable[ + [site_search_engine_service.BatchVerifyTargetSitesRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the batch verify target sites method over gRPC. + + Verify target sites' ownership and validity. + This API sends all the target sites under site search + engine for verification. + + Returns: + Callable[[~.BatchVerifyTargetSitesRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_verify_target_sites" not in self._stubs: + self._stubs["batch_verify_target_sites"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SiteSearchEngineService/BatchVerifyTargetSites", + request_serializer=site_search_engine_service.BatchVerifyTargetSitesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["batch_verify_target_sites"] + + @property + def fetch_domain_verification_status( + self, + ) -> Callable[ + [site_search_engine_service.FetchDomainVerificationStatusRequest], + site_search_engine_service.FetchDomainVerificationStatusResponse, + ]: + r"""Return a callable for the fetch domain verification + status method over gRPC. + + Returns list of target sites with its domain verification + status. This method can only be called under data store with + BASIC_SITE_SEARCH state at the moment. + + Returns: + Callable[[~.FetchDomainVerificationStatusRequest], + ~.FetchDomainVerificationStatusResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "fetch_domain_verification_status" not in self._stubs: + self._stubs[ + "fetch_domain_verification_status" + ] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SiteSearchEngineService/FetchDomainVerificationStatus", + request_serializer=site_search_engine_service.FetchDomainVerificationStatusRequest.serialize, + response_deserializer=site_search_engine_service.FetchDomainVerificationStatusResponse.deserialize, + ) + return self._stubs["fetch_domain_verification_status"] + + def close(self): + self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("SiteSearchEngineServiceGrpcTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..7acce98f9786 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/grpc_asyncio.py @@ -0,0 +1,659 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.discoveryengine_v1.types import ( + site_search_engine, + site_search_engine_service, +) + +from .base import DEFAULT_CLIENT_INFO, SiteSearchEngineServiceTransport +from .grpc import SiteSearchEngineServiceGrpcTransport + + +class SiteSearchEngineServiceGrpcAsyncIOTransport(SiteSearchEngineServiceTransport): + """gRPC AsyncIO backend transport for SiteSearchEngineService. + + Service for managing site search related resources. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. 
+ """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def get_site_search_engine( + self, + ) -> Callable[ + [site_search_engine_service.GetSiteSearchEngineRequest], + Awaitable[site_search_engine.SiteSearchEngine], + ]: + r"""Return a callable for the get site search engine method over gRPC. + + Gets the + [SiteSearchEngine][google.cloud.discoveryengine.v1.SiteSearchEngine]. + + Returns: + Callable[[~.GetSiteSearchEngineRequest], + Awaitable[~.SiteSearchEngine]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_site_search_engine" not in self._stubs: + self._stubs["get_site_search_engine"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SiteSearchEngineService/GetSiteSearchEngine", + request_serializer=site_search_engine_service.GetSiteSearchEngineRequest.serialize, + response_deserializer=site_search_engine.SiteSearchEngine.deserialize, + ) + return self._stubs["get_site_search_engine"] + + @property + def create_target_site( + self, + ) -> Callable[ + [site_search_engine_service.CreateTargetSiteRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create target site method over gRPC. + + Creates a + [TargetSite][google.cloud.discoveryengine.v1.TargetSite]. + + Returns: + Callable[[~.CreateTargetSiteRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_target_site" not in self._stubs: + self._stubs["create_target_site"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SiteSearchEngineService/CreateTargetSite", + request_serializer=site_search_engine_service.CreateTargetSiteRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_target_site"] + + @property + def batch_create_target_sites( + self, + ) -> Callable[ + [site_search_engine_service.BatchCreateTargetSitesRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the batch create target sites method over gRPC. + + Creates [TargetSite][google.cloud.discoveryengine.v1.TargetSite] + in a batch. + + Returns: + Callable[[~.BatchCreateTargetSitesRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_create_target_sites" not in self._stubs: + self._stubs["batch_create_target_sites"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SiteSearchEngineService/BatchCreateTargetSites", + request_serializer=site_search_engine_service.BatchCreateTargetSitesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["batch_create_target_sites"] + + @property + def get_target_site( + self, + ) -> Callable[ + [site_search_engine_service.GetTargetSiteRequest], + Awaitable[site_search_engine.TargetSite], + ]: + r"""Return a callable for the get target site method over gRPC. + + Gets a [TargetSite][google.cloud.discoveryengine.v1.TargetSite]. 
+ + Returns: + Callable[[~.GetTargetSiteRequest], + Awaitable[~.TargetSite]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_target_site" not in self._stubs: + self._stubs["get_target_site"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SiteSearchEngineService/GetTargetSite", + request_serializer=site_search_engine_service.GetTargetSiteRequest.serialize, + response_deserializer=site_search_engine.TargetSite.deserialize, + ) + return self._stubs["get_target_site"] + + @property + def update_target_site( + self, + ) -> Callable[ + [site_search_engine_service.UpdateTargetSiteRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update target site method over gRPC. + + Updates a + [TargetSite][google.cloud.discoveryengine.v1.TargetSite]. + + Returns: + Callable[[~.UpdateTargetSiteRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_target_site" not in self._stubs: + self._stubs["update_target_site"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SiteSearchEngineService/UpdateTargetSite", + request_serializer=site_search_engine_service.UpdateTargetSiteRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_target_site"] + + @property + def delete_target_site( + self, + ) -> Callable[ + [site_search_engine_service.DeleteTargetSiteRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete target site method over gRPC. + + Deletes a + [TargetSite][google.cloud.discoveryengine.v1.TargetSite]. + + Returns: + Callable[[~.DeleteTargetSiteRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_target_site" not in self._stubs: + self._stubs["delete_target_site"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SiteSearchEngineService/DeleteTargetSite", + request_serializer=site_search_engine_service.DeleteTargetSiteRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_target_site"] + + @property + def list_target_sites( + self, + ) -> Callable[ + [site_search_engine_service.ListTargetSitesRequest], + Awaitable[site_search_engine_service.ListTargetSitesResponse], + ]: + r"""Return a callable for the list target sites method over gRPC. + + Gets a list of + [TargetSite][google.cloud.discoveryengine.v1.TargetSite]s. + + Returns: + Callable[[~.ListTargetSitesRequest], + Awaitable[~.ListTargetSitesResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_target_sites" not in self._stubs: + self._stubs["list_target_sites"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SiteSearchEngineService/ListTargetSites", + request_serializer=site_search_engine_service.ListTargetSitesRequest.serialize, + response_deserializer=site_search_engine_service.ListTargetSitesResponse.deserialize, + ) + return self._stubs["list_target_sites"] + + @property + def enable_advanced_site_search( + self, + ) -> Callable[ + [site_search_engine_service.EnableAdvancedSiteSearchRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the enable advanced site search method over gRPC. + + Upgrade from basic site search to advanced site + search. + + Returns: + Callable[[~.EnableAdvancedSiteSearchRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "enable_advanced_site_search" not in self._stubs: + self._stubs["enable_advanced_site_search"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SiteSearchEngineService/EnableAdvancedSiteSearch", + request_serializer=site_search_engine_service.EnableAdvancedSiteSearchRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["enable_advanced_site_search"] + + @property + def disable_advanced_site_search( + self, + ) -> Callable[ + [site_search_engine_service.DisableAdvancedSiteSearchRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the disable advanced site search method over gRPC. 
+ + Downgrade from advanced site search to basic site + search. + + Returns: + Callable[[~.DisableAdvancedSiteSearchRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "disable_advanced_site_search" not in self._stubs: + self._stubs["disable_advanced_site_search"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SiteSearchEngineService/DisableAdvancedSiteSearch", + request_serializer=site_search_engine_service.DisableAdvancedSiteSearchRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["disable_advanced_site_search"] + + @property + def recrawl_uris( + self, + ) -> Callable[ + [site_search_engine_service.RecrawlUrisRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the recrawl uris method over gRPC. + + Request on-demand recrawl for a list of URIs. + + Returns: + Callable[[~.RecrawlUrisRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "recrawl_uris" not in self._stubs: + self._stubs["recrawl_uris"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SiteSearchEngineService/RecrawlUris", + request_serializer=site_search_engine_service.RecrawlUrisRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["recrawl_uris"] + + @property + def batch_verify_target_sites( + self, + ) -> Callable[ + [site_search_engine_service.BatchVerifyTargetSitesRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the batch verify target sites method over gRPC. + + Verify target sites' ownership and validity. + This API sends all the target sites under site search + engine for verification. + + Returns: + Callable[[~.BatchVerifyTargetSitesRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_verify_target_sites" not in self._stubs: + self._stubs["batch_verify_target_sites"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SiteSearchEngineService/BatchVerifyTargetSites", + request_serializer=site_search_engine_service.BatchVerifyTargetSitesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["batch_verify_target_sites"] + + @property + def fetch_domain_verification_status( + self, + ) -> Callable[ + [site_search_engine_service.FetchDomainVerificationStatusRequest], + Awaitable[site_search_engine_service.FetchDomainVerificationStatusResponse], + ]: + r"""Return a callable for the fetch domain verification + status method over gRPC. + + Returns list of target sites with its domain verification + status. 
This method can only be called under data store with + BASIC_SITE_SEARCH state at the moment. + + Returns: + Callable[[~.FetchDomainVerificationStatusRequest], + Awaitable[~.FetchDomainVerificationStatusResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "fetch_domain_verification_status" not in self._stubs: + self._stubs[ + "fetch_domain_verification_status" + ] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SiteSearchEngineService/FetchDomainVerificationStatus", + request_serializer=site_search_engine_service.FetchDomainVerificationStatusRequest.serialize, + response_deserializer=site_search_engine_service.FetchDomainVerificationStatusResponse.deserialize, + ) + return self._stubs["fetch_domain_verification_status"] + + def close(self): + return self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("SiteSearchEngineServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/rest.py new file mode 100644 index 000000000000..7fe61dab5964 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/rest.py @@ -0,0 +1,2386 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.discoveryengine_v1.types import ( + site_search_engine, + site_search_engine_service, +) + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import SiteSearchEngineServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class SiteSearchEngineServiceRestInterceptor: + """Interceptor for SiteSearchEngineService. 
+ + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the SiteSearchEngineServiceRestTransport. + + .. code-block:: python + class MyCustomSiteSearchEngineServiceInterceptor(SiteSearchEngineServiceRestInterceptor): + def pre_batch_create_target_sites(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_create_target_sites(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_batch_verify_target_sites(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_verify_target_sites(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_target_site(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_target_site(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_target_site(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_target_site(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_disable_advanced_site_search(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_disable_advanced_site_search(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_enable_advanced_site_search(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_enable_advanced_site_search(self, response): 
+ logging.log(f"Received response: {response}") + return response + + def pre_fetch_domain_verification_status(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_fetch_domain_verification_status(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_site_search_engine(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_site_search_engine(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_target_site(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_target_site(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_target_sites(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_target_sites(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_recrawl_uris(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_recrawl_uris(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_target_site(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_target_site(self, response): + logging.log(f"Received response: {response}") + return response + + transport = SiteSearchEngineServiceRestTransport(interceptor=MyCustomSiteSearchEngineServiceInterceptor()) + client = SiteSearchEngineServiceClient(transport=transport) + + + """ + + def pre_batch_create_target_sites( + self, + request: site_search_engine_service.BatchCreateTargetSitesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + site_search_engine_service.BatchCreateTargetSitesRequest, + Sequence[Tuple[str, str]], + ]: + 
"""Pre-rpc interceptor for batch_create_target_sites + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. + """ + return request, metadata + + def post_batch_create_target_sites( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for batch_create_target_sites + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. + """ + return response + + def pre_batch_verify_target_sites( + self, + request: site_search_engine_service.BatchVerifyTargetSitesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + site_search_engine_service.BatchVerifyTargetSitesRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for batch_verify_target_sites + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. + """ + return request, metadata + + def post_batch_verify_target_sites( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for batch_verify_target_sites + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. + """ + return response + + def pre_create_target_site( + self, + request: site_search_engine_service.CreateTargetSiteRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + site_search_engine_service.CreateTargetSiteRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for create_target_site + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. 
+ """ + return request, metadata + + def post_create_target_site( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_target_site + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. + """ + return response + + def pre_delete_target_site( + self, + request: site_search_engine_service.DeleteTargetSiteRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + site_search_engine_service.DeleteTargetSiteRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for delete_target_site + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. + """ + return request, metadata + + def post_delete_target_site( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_target_site + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. + """ + return response + + def pre_disable_advanced_site_search( + self, + request: site_search_engine_service.DisableAdvancedSiteSearchRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + site_search_engine_service.DisableAdvancedSiteSearchRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for disable_advanced_site_search + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. + """ + return request, metadata + + def post_disable_advanced_site_search( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for disable_advanced_site_search + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. 
+ """ + return response + + def pre_enable_advanced_site_search( + self, + request: site_search_engine_service.EnableAdvancedSiteSearchRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + site_search_engine_service.EnableAdvancedSiteSearchRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for enable_advanced_site_search + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. + """ + return request, metadata + + def post_enable_advanced_site_search( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for enable_advanced_site_search + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. + """ + return response + + def pre_fetch_domain_verification_status( + self, + request: site_search_engine_service.FetchDomainVerificationStatusRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + site_search_engine_service.FetchDomainVerificationStatusRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for fetch_domain_verification_status + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. + """ + return request, metadata + + def post_fetch_domain_verification_status( + self, response: site_search_engine_service.FetchDomainVerificationStatusResponse + ) -> site_search_engine_service.FetchDomainVerificationStatusResponse: + """Post-rpc interceptor for fetch_domain_verification_status + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. 
+ """ + return response + + def pre_get_site_search_engine( + self, + request: site_search_engine_service.GetSiteSearchEngineRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + site_search_engine_service.GetSiteSearchEngineRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_site_search_engine + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. + """ + return request, metadata + + def post_get_site_search_engine( + self, response: site_search_engine.SiteSearchEngine + ) -> site_search_engine.SiteSearchEngine: + """Post-rpc interceptor for get_site_search_engine + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. + """ + return response + + def pre_get_target_site( + self, + request: site_search_engine_service.GetTargetSiteRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + site_search_engine_service.GetTargetSiteRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_target_site + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. + """ + return request, metadata + + def post_get_target_site( + self, response: site_search_engine.TargetSite + ) -> site_search_engine.TargetSite: + """Post-rpc interceptor for get_target_site + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. 
+ """ + return response + + def pre_list_target_sites( + self, + request: site_search_engine_service.ListTargetSitesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + site_search_engine_service.ListTargetSitesRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_target_sites + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. + """ + return request, metadata + + def post_list_target_sites( + self, response: site_search_engine_service.ListTargetSitesResponse + ) -> site_search_engine_service.ListTargetSitesResponse: + """Post-rpc interceptor for list_target_sites + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. + """ + return response + + def pre_recrawl_uris( + self, + request: site_search_engine_service.RecrawlUrisRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + site_search_engine_service.RecrawlUrisRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for recrawl_uris + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. + """ + return request, metadata + + def post_recrawl_uris( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for recrawl_uris + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. 
+ """ + return response + + def pre_update_target_site( + self, + request: site_search_engine_service.UpdateTargetSiteRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + site_search_engine_service.UpdateTargetSiteRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for update_target_site + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. + """ + return request, metadata + + def post_update_target_site( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_target_site + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. 
+ """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class SiteSearchEngineServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: SiteSearchEngineServiceRestInterceptor + + +class SiteSearchEngineServiceRestTransport(SiteSearchEngineServiceTransport): + """REST backend transport for SiteSearchEngineService. + + Service for managing site search related resources. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[SiteSearchEngineServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+    # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the +    # credentials object +    maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) +    if maybe_url_match is None: +        raise ValueError( +            f"Unexpected hostname structure: {host}" +        )  # pragma: NO COVER + +        url_match_items = maybe_url_match.groupdict() + +        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + +        super().__init__( +            host=host, +            credentials=credentials, +            client_info=client_info, +            always_use_jwt_access=always_use_jwt_access, +            api_audience=api_audience, +        ) +        self._session = AuthorizedSession( +            self._credentials, default_host=self.DEFAULT_HOST +        ) +        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None +        if client_cert_source_for_mtls: +            self._session.configure_mtls_channel(client_cert_source_for_mtls) +        self._interceptor = interceptor or SiteSearchEngineServiceRestInterceptor() +        self._prep_wrapped_messages(client_info) + +    @property +    def operations_client(self) -> operations_v1.AbstractOperationsClient: +        """Create the client designed to process long-running operations. + +        This property caches on the instance; repeated calls return the same +        client. +        """ +        # Only create a new client if we do not already have one. 
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": 
"/v1/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. 
+ return self._operations_client + + class _BatchCreateTargetSites(SiteSearchEngineServiceRestStub): + def __hash__(self): + return hash("BatchCreateTargetSites") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: site_search_engine_service.BatchCreateTargetSitesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the batch create target sites method over HTTP. + + Args: + request (~.site_search_engine_service.BatchCreateTargetSitesRequest): + The request object. Request message for + [SiteSearchEngineService.BatchCreateTargetSites][google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchCreateTargetSites] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/dataStores/*/siteSearchEngine}/targetSites:batchCreate", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/targetSites:batchCreate", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_batch_create_target_sites( + request, metadata + ) + pb_request = site_search_engine_service.BatchCreateTargetSitesRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_create_target_sites(resp) + return resp + + class _BatchVerifyTargetSites(SiteSearchEngineServiceRestStub): + def __hash__(self): + return hash("BatchVerifyTargetSites") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: site_search_engine_service.BatchVerifyTargetSitesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the batch verify target sites method over HTTP. + + Args: + request (~.site_search_engine_service.BatchVerifyTargetSitesRequest): + The request object. Request message for + [SiteSearchEngineService.BatchVerifyTargetSites][google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchVerifyTargetSites] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}:batchVerifyTargetSites", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_batch_verify_target_sites( + request, metadata + ) + pb_request = site_search_engine_service.BatchVerifyTargetSitesRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_verify_target_sites(resp) + return resp + + class _CreateTargetSite(SiteSearchEngineServiceRestStub): + def __hash__(self): + return hash("CreateTargetSite") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: site_search_engine_service.CreateTargetSiteRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create target site method over HTTP. + + Args: + request (~.site_search_engine_service.CreateTargetSiteRequest): + The request object. Request message for + [SiteSearchEngineService.CreateTargetSite][google.cloud.discoveryengine.v1.SiteSearchEngineService.CreateTargetSite] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/dataStores/*/siteSearchEngine}/targetSites", + "body": "target_site", + }, + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/targetSites", + "body": "target_site", + }, + ] + request, metadata = self._interceptor.pre_create_target_site( + request, metadata + ) + pb_request = site_search_engine_service.CreateTargetSiteRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_target_site(resp) + return resp + + class _DeleteTargetSite(SiteSearchEngineServiceRestStub): + def __hash__(self): + return hash("DeleteTargetSite") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: site_search_engine_service.DeleteTargetSiteRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete target site method over HTTP. + + Args: + request (~.site_search_engine_service.DeleteTargetSiteRequest): + The request object. Request message for + [SiteSearchEngineService.DeleteTargetSite][google.cloud.discoveryengine.v1.SiteSearchEngineService.DeleteTargetSite] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/siteSearchEngine/targetSites/*}", + }, + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_target_site( + request, metadata + ) + pb_request = site_search_engine_service.DeleteTargetSiteRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_target_site(resp) + return resp + + class _DisableAdvancedSiteSearch(SiteSearchEngineServiceRestStub): + def __hash__(self): + return hash("DisableAdvancedSiteSearch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: site_search_engine_service.DisableAdvancedSiteSearchRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the disable advanced site + search method over HTTP. + + Args: + request (~.site_search_engine_service.DisableAdvancedSiteSearchRequest): + The request object. Request message for + [SiteSearchEngineService.DisableAdvancedSiteSearch][google.cloud.discoveryengine.v1.SiteSearchEngineService.DisableAdvancedSiteSearch] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{site_search_engine=projects/*/locations/*/dataStores/*/siteSearchEngine}:disableAdvancedSiteSearch", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{site_search_engine=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}:disableAdvancedSiteSearch", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_disable_advanced_site_search( + request, metadata + ) + pb_request = site_search_engine_service.DisableAdvancedSiteSearchRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_disable_advanced_site_search(resp) + return resp + + class _EnableAdvancedSiteSearch(SiteSearchEngineServiceRestStub): + def __hash__(self): + return hash("EnableAdvancedSiteSearch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: site_search_engine_service.EnableAdvancedSiteSearchRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the enable advanced site + search method over HTTP. + + Args: + request (~.site_search_engine_service.EnableAdvancedSiteSearchRequest): + The request object. Request message for + [SiteSearchEngineService.EnableAdvancedSiteSearch][google.cloud.discoveryengine.v1.SiteSearchEngineService.EnableAdvancedSiteSearch] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{site_search_engine=projects/*/locations/*/dataStores/*/siteSearchEngine}:enableAdvancedSiteSearch", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{site_search_engine=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}:enableAdvancedSiteSearch", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_enable_advanced_site_search( + request, metadata + ) + pb_request = site_search_engine_service.EnableAdvancedSiteSearchRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_enable_advanced_site_search(resp) + return resp + + class _FetchDomainVerificationStatus(SiteSearchEngineServiceRestStub): + def __hash__(self): + return hash("FetchDomainVerificationStatus") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: site_search_engine_service.FetchDomainVerificationStatusRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> site_search_engine_service.FetchDomainVerificationStatusResponse: + r"""Call the fetch domain verification + status method over HTTP. + + Args: + request (~.site_search_engine_service.FetchDomainVerificationStatusRequest): + The request object. Request message for + [SiteSearchEngineService.FetchDomainVerificationStatus][google.cloud.discoveryengine.v1.SiteSearchEngineService.FetchDomainVerificationStatus] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.site_search_engine_service.FetchDomainVerificationStatusResponse: + Response message for + [SiteSearchEngineService.FetchDomainVerificationStatus][google.cloud.discoveryengine.v1.SiteSearchEngineService.FetchDomainVerificationStatus] + method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{site_search_engine=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}:fetchDomainVerificationStatus", + }, + ] + request, metadata = self._interceptor.pre_fetch_domain_verification_status( + request, metadata + ) + pb_request = ( + site_search_engine_service.FetchDomainVerificationStatusRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = site_search_engine_service.FetchDomainVerificationStatusResponse() + pb_resp = ( + site_search_engine_service.FetchDomainVerificationStatusResponse.pb( + resp + ) + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_fetch_domain_verification_status(resp) + return resp + + class _GetSiteSearchEngine(SiteSearchEngineServiceRestStub): + def __hash__(self): + return hash("GetSiteSearchEngine") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: site_search_engine_service.GetSiteSearchEngineRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> site_search_engine.SiteSearchEngine: + r"""Call the get site search engine method over HTTP. + + Args: + request (~.site_search_engine_service.GetSiteSearchEngineRequest): + The request object. Request message for + [SiteSearchEngineService.GetSiteSearchEngine][google.cloud.discoveryengine.v1.SiteSearchEngineService.GetSiteSearchEngine] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.site_search_engine.SiteSearchEngine: + SiteSearchEngine captures DataStore + level site search persisting + configurations. It is a singleton value + per data store. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/siteSearchEngine}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}", + }, + ] + request, metadata = self._interceptor.pre_get_site_search_engine( + request, metadata + ) + pb_request = site_search_engine_service.GetSiteSearchEngineRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = site_search_engine.SiteSearchEngine() + pb_resp = site_search_engine.SiteSearchEngine.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_site_search_engine(resp) + return resp + + class _GetTargetSite(SiteSearchEngineServiceRestStub): + def __hash__(self): + return hash("GetTargetSite") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: site_search_engine_service.GetTargetSiteRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> site_search_engine.TargetSite: + r"""Call the get target site method over HTTP. + + Args: + request (~.site_search_engine_service.GetTargetSiteRequest): + The request object. Request message for + [SiteSearchEngineService.GetTargetSite][google.cloud.discoveryengine.v1.SiteSearchEngineService.GetTargetSite] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.site_search_engine.TargetSite: + A target site for the + SiteSearchEngine. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/siteSearchEngine/targetSites/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/*}", + }, + ] + request, metadata = self._interceptor.pre_get_target_site(request, metadata) + pb_request = site_search_engine_service.GetTargetSiteRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = site_search_engine.TargetSite() + pb_resp = site_search_engine.TargetSite.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_target_site(resp) + return resp + + class _ListTargetSites(SiteSearchEngineServiceRestStub): + def __hash__(self): + return hash("ListTargetSites") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: site_search_engine_service.ListTargetSitesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> site_search_engine_service.ListTargetSitesResponse: + r"""Call the list target sites method over HTTP. + + Args: + request (~.site_search_engine_service.ListTargetSitesRequest): + The request object. Request message for + [SiteSearchEngineService.ListTargetSites][google.cloud.discoveryengine.v1.SiteSearchEngineService.ListTargetSites] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.site_search_engine_service.ListTargetSitesResponse: + Response message for + [SiteSearchEngineService.ListTargetSites][google.cloud.discoveryengine.v1.SiteSearchEngineService.ListTargetSites] + method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/dataStores/*/siteSearchEngine}/targetSites", + }, + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/targetSites", + }, + ] + request, metadata = self._interceptor.pre_list_target_sites( + request, metadata + ) + pb_request = site_search_engine_service.ListTargetSitesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = site_search_engine_service.ListTargetSitesResponse() + pb_resp = site_search_engine_service.ListTargetSitesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_target_sites(resp) + return resp + + class _RecrawlUris(SiteSearchEngineServiceRestStub): + def __hash__(self): + return hash("RecrawlUris") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: site_search_engine_service.RecrawlUrisRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the recrawl uris method over HTTP. + + Args: + request (~.site_search_engine_service.RecrawlUrisRequest): + The request object. Request message for + [SiteSearchEngineService.RecrawlUris][google.cloud.discoveryengine.v1.SiteSearchEngineService.RecrawlUris] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{site_search_engine=projects/*/locations/*/dataStores/*/siteSearchEngine}:recrawlUris", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{site_search_engine=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}:recrawlUris", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_recrawl_uris(request, metadata) + pb_request = site_search_engine_service.RecrawlUrisRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_recrawl_uris(resp) + return resp + + class _UpdateTargetSite(SiteSearchEngineServiceRestStub): + def __hash__(self): + return hash("UpdateTargetSite") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: site_search_engine_service.UpdateTargetSiteRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update target site method over HTTP. + + Args: + request (~.site_search_engine_service.UpdateTargetSiteRequest): + The request object. Request message for + [SiteSearchEngineService.UpdateTargetSite][google.cloud.discoveryengine.v1.SiteSearchEngineService.UpdateTargetSite] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{target_site.name=projects/*/locations/*/dataStores/*/siteSearchEngine/targetSites/*}", + "body": "target_site", + }, + { + "method": "patch", + "uri": "/v1/{target_site.name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/*}", + "body": "target_site", + }, + ] + request, metadata = self._interceptor.pre_update_target_site( + request, metadata + ) + pb_request = site_search_engine_service.UpdateTargetSiteRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_target_site(resp) + return resp + + @property + def batch_create_target_sites( + self, + ) -> Callable[ + [site_search_engine_service.BatchCreateTargetSitesRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchCreateTargetSites(self._session, self._host, self._interceptor) # type: ignore + + @property + def batch_verify_target_sites( + self, + ) -> Callable[ + [site_search_engine_service.BatchVerifyTargetSitesRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchVerifyTargetSites(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_target_site( + self, + ) -> Callable[ + [site_search_engine_service.CreateTargetSiteRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateTargetSite(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_target_site( + self, + ) -> Callable[ + [site_search_engine_service.DeleteTargetSiteRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteTargetSite(self._session, self._host, self._interceptor) # type: ignore + + @property + def disable_advanced_site_search( + self, + ) -> Callable[ + [site_search_engine_service.DisableAdvancedSiteSearchRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DisableAdvancedSiteSearch(self._session, self._host, self._interceptor) # type: ignore + + @property + def enable_advanced_site_search( + self, + ) -> Callable[ + [site_search_engine_service.EnableAdvancedSiteSearchRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._EnableAdvancedSiteSearch(self._session, self._host, self._interceptor) # type: ignore + + @property + def fetch_domain_verification_status( + self, + ) -> Callable[ + [site_search_engine_service.FetchDomainVerificationStatusRequest], + site_search_engine_service.FetchDomainVerificationStatusResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FetchDomainVerificationStatus(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_site_search_engine( + self, + ) -> Callable[ + [site_search_engine_service.GetSiteSearchEngineRequest], + site_search_engine.SiteSearchEngine, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetSiteSearchEngine(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_target_site( + self, + ) -> Callable[ + [site_search_engine_service.GetTargetSiteRequest], site_search_engine.TargetSite + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetTargetSite(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_target_sites( + self, + ) -> Callable[ + [site_search_engine_service.ListTargetSitesRequest], + site_search_engine_service.ListTargetSitesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListTargetSites(self._session, self._host, self._interceptor) # type: ignore + + @property + def recrawl_uris( + self, + ) -> Callable[ + [site_search_engine_service.RecrawlUrisRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RecrawlUris(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_target_site( + self, + ) -> Callable[ + [site_search_engine_service.UpdateTargetSiteRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateTargetSite(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(SiteSearchEngineServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify 
the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(SiteSearchEngineServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + 
query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("SiteSearchEngineServiceRestTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/rest.py index b937466f5e1d..78a8532acdee 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/rest.py @@ -336,6 +336,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", @@ -394,6 +398,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*}/operations", }, + { + "method": 
"get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", @@ -877,6 +885,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", @@ -995,6 +1007,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/__init__.py index a3dd913c4aec..a45cecf70deb 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/__init__.py @@ -13,7 +13,16 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .common import CustomAttribute, Interval, UserInfo +from .common import ( + CustomAttribute, + IndustryVertical, + Interval, + SearchAddOn, + SearchTier, + SolutionType, + UserInfo, +) +from .completion import SuggestionDenyListEntry from .completion_service import CompleteQueryRequest, CompleteQueryResponse from .conversation import ( Conversation, @@ -32,6 +41,17 @@ ListConversationsResponse, UpdateConversationRequest, ) +from .data_store import DataStore +from .data_store_service import ( + CreateDataStoreMetadata, + CreateDataStoreRequest, + DeleteDataStoreMetadata, + DeleteDataStoreRequest, + GetDataStoreRequest, + ListDataStoresRequest, + ListDataStoresResponse, + UpdateDataStoreRequest, +) from .document import Document from .document_service import ( CreateDocumentRequest, @@ -41,6 +61,17 @@ ListDocumentsResponse, UpdateDocumentRequest, ) +from .engine import Engine +from .engine_service import ( + CreateEngineMetadata, + CreateEngineRequest, + DeleteEngineMetadata, + DeleteEngineRequest, + GetEngineRequest, + ListEnginesRequest, + ListEnginesResponse, + UpdateEngineRequest, +) from .import_config import ( BigQuerySource, GcsSource, @@ -48,6 +79,9 @@ ImportDocumentsRequest, ImportDocumentsResponse, ImportErrorConfig, + ImportSuggestionDenyListEntriesMetadata, + ImportSuggestionDenyListEntriesRequest, + ImportSuggestionDenyListEntriesResponse, ImportUserEventsMetadata, ImportUserEventsRequest, ImportUserEventsResponse, @@ -56,6 +90,9 @@ PurgeDocumentsMetadata, PurgeDocumentsRequest, PurgeDocumentsResponse, + PurgeSuggestionDenyListEntriesMetadata, + PurgeSuggestionDenyListEntriesRequest, + PurgeSuggestionDenyListEntriesResponse, ) from .schema import Schema from .schema_service import ( @@ -70,6 +107,36 @@ UpdateSchemaRequest, ) from .search_service import SearchRequest, SearchResponse +from .site_search_engine import SiteSearchEngine, SiteVerificationInfo, TargetSite +from .site_search_engine_service import ( + BatchCreateTargetSiteMetadata, + 
BatchCreateTargetSitesRequest, + BatchCreateTargetSitesResponse, + BatchVerifyTargetSitesMetadata, + BatchVerifyTargetSitesRequest, + BatchVerifyTargetSitesResponse, + CreateTargetSiteMetadata, + CreateTargetSiteRequest, + DeleteTargetSiteMetadata, + DeleteTargetSiteRequest, + DisableAdvancedSiteSearchMetadata, + DisableAdvancedSiteSearchRequest, + DisableAdvancedSiteSearchResponse, + EnableAdvancedSiteSearchMetadata, + EnableAdvancedSiteSearchRequest, + EnableAdvancedSiteSearchResponse, + FetchDomainVerificationStatusRequest, + FetchDomainVerificationStatusResponse, + GetSiteSearchEngineRequest, + GetTargetSiteRequest, + ListTargetSitesRequest, + ListTargetSitesResponse, + RecrawlUrisMetadata, + RecrawlUrisRequest, + RecrawlUrisResponse, + UpdateTargetSiteMetadata, + UpdateTargetSiteRequest, +) from .user_event import ( CompletionInfo, DocumentInfo, @@ -86,6 +153,11 @@ "CustomAttribute", "Interval", "UserInfo", + "IndustryVertical", + "SearchAddOn", + "SearchTier", + "SolutionType", + "SuggestionDenyListEntry", "CompleteQueryRequest", "CompleteQueryResponse", "Conversation", @@ -101,6 +173,15 @@ "ListConversationsRequest", "ListConversationsResponse", "UpdateConversationRequest", + "DataStore", + "CreateDataStoreMetadata", + "CreateDataStoreRequest", + "DeleteDataStoreMetadata", + "DeleteDataStoreRequest", + "GetDataStoreRequest", + "ListDataStoresRequest", + "ListDataStoresResponse", + "UpdateDataStoreRequest", "Document", "CreateDocumentRequest", "DeleteDocumentRequest", @@ -108,18 +189,33 @@ "ListDocumentsRequest", "ListDocumentsResponse", "UpdateDocumentRequest", + "Engine", + "CreateEngineMetadata", + "CreateEngineRequest", + "DeleteEngineMetadata", + "DeleteEngineRequest", + "GetEngineRequest", + "ListEnginesRequest", + "ListEnginesResponse", + "UpdateEngineRequest", "BigQuerySource", "GcsSource", "ImportDocumentsMetadata", "ImportDocumentsRequest", "ImportDocumentsResponse", "ImportErrorConfig", + "ImportSuggestionDenyListEntriesMetadata", + 
"ImportSuggestionDenyListEntriesRequest", + "ImportSuggestionDenyListEntriesResponse", "ImportUserEventsMetadata", "ImportUserEventsRequest", "ImportUserEventsResponse", "PurgeDocumentsMetadata", "PurgeDocumentsRequest", "PurgeDocumentsResponse", + "PurgeSuggestionDenyListEntriesMetadata", + "PurgeSuggestionDenyListEntriesRequest", + "PurgeSuggestionDenyListEntriesResponse", "Schema", "CreateSchemaMetadata", "CreateSchemaRequest", @@ -132,6 +228,36 @@ "UpdateSchemaRequest", "SearchRequest", "SearchResponse", + "SiteSearchEngine", + "SiteVerificationInfo", + "TargetSite", + "BatchCreateTargetSiteMetadata", + "BatchCreateTargetSitesRequest", + "BatchCreateTargetSitesResponse", + "BatchVerifyTargetSitesMetadata", + "BatchVerifyTargetSitesRequest", + "BatchVerifyTargetSitesResponse", + "CreateTargetSiteMetadata", + "CreateTargetSiteRequest", + "DeleteTargetSiteMetadata", + "DeleteTargetSiteRequest", + "DisableAdvancedSiteSearchMetadata", + "DisableAdvancedSiteSearchRequest", + "DisableAdvancedSiteSearchResponse", + "EnableAdvancedSiteSearchMetadata", + "EnableAdvancedSiteSearchRequest", + "EnableAdvancedSiteSearchResponse", + "FetchDomainVerificationStatusRequest", + "FetchDomainVerificationStatusResponse", + "GetSiteSearchEngineRequest", + "GetTargetSiteRequest", + "ListTargetSitesRequest", + "ListTargetSitesResponse", + "RecrawlUrisMetadata", + "RecrawlUrisRequest", + "RecrawlUrisResponse", + "UpdateTargetSiteMetadata", + "UpdateTargetSiteRequest", "CompletionInfo", "DocumentInfo", "MediaInfo", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/common.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/common.py index bae8178b7fa6..ccf7c5ca8ce9 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/common.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/common.py @@ -22,6 +22,10 @@ __protobuf__ = proto.module( 
package="google.cloud.discoveryengine.v1", manifest={ + "IndustryVertical", + "SolutionType", + "SearchTier", + "SearchAddOn", "Interval", "CustomAttribute", "UserInfo", @@ -29,6 +33,77 @@ ) +class IndustryVertical(proto.Enum): + r"""The industry vertical associated with the + [DataStore][google.cloud.discoveryengine.v1.DataStore]. + + Values: + INDUSTRY_VERTICAL_UNSPECIFIED (0): + Value used when unset. + GENERIC (1): + The generic vertical for documents that are + not specific to any industry vertical. + MEDIA (2): + The media industry vertical. + """ + INDUSTRY_VERTICAL_UNSPECIFIED = 0 + GENERIC = 1 + MEDIA = 2 + + +class SolutionType(proto.Enum): + r"""The type of solution. + + Values: + SOLUTION_TYPE_UNSPECIFIED (0): + Default value. + SOLUTION_TYPE_RECOMMENDATION (1): + Used for Recommendations AI. + SOLUTION_TYPE_SEARCH (2): + Used for Discovery Search. + SOLUTION_TYPE_CHAT (3): + Used for use cases related to the Generative + AI agent. + """ + SOLUTION_TYPE_UNSPECIFIED = 0 + SOLUTION_TYPE_RECOMMENDATION = 1 + SOLUTION_TYPE_SEARCH = 2 + SOLUTION_TYPE_CHAT = 3 + + +class SearchTier(proto.Enum): + r"""Tiers of search features. Different tiers might have + different pricing. To learn more, please check the pricing + documentation. + + Values: + SEARCH_TIER_UNSPECIFIED (0): + Default value when the enum is unspecified. + This is invalid to use. + SEARCH_TIER_STANDARD (1): + Standard tier. + SEARCH_TIER_ENTERPRISE (2): + Enterprise tier. + """ + SEARCH_TIER_UNSPECIFIED = 0 + SEARCH_TIER_STANDARD = 1 + SEARCH_TIER_ENTERPRISE = 2 + + +class SearchAddOn(proto.Enum): + r"""Add-on that provides additional functionality for search. + + Values: + SEARCH_ADD_ON_UNSPECIFIED (0): + Default value when the enum is unspecified. + This is invalid to use. + SEARCH_ADD_ON_LLM (1): + Large language model add-on. + """ + SEARCH_ADD_ON_UNSPECIFIED = 0 + SEARCH_ADD_ON_LLM = 1 + + class Interval(proto.Message): r"""A floating point interval. 
class SuggestionDenyListEntry(proto.Message):
    r"""Suggestion deny list entry identifying the phrase to block
    from suggestions and the applied operation for the phrase.

    Attributes:
        block_phrase (str):
            Required. Phrase to block from suggestions
            served. Can be maximum 125 characters.
        match_operator (google.cloud.discoveryengine_v1.types.SuggestionDenyListEntry.MatchOperator):
            Required. The match operator to apply for
            this phrase. Whether to block the exact phrase,
            or block any suggestions containing this phrase.
    """

    class MatchOperator(proto.Enum):
        r"""Operator for matching with the generated suggestions.

        Values:
            MATCH_OPERATOR_UNSPECIFIED (0):
                Default value. Should not be used.
            EXACT_MATCH (1):
                If the suggestion is an exact match to the block_phrase,
                then block it.
            CONTAINS (2):
                If the suggestion contains the block_phrase, then block it.
        """
        MATCH_OPERATOR_UNSPECIFIED = 0
        EXACT_MATCH = 1
        CONTAINS = 2

    block_phrase: str = proto.Field(
        proto.STRING,
        number=1,
    )
    match_operator: MatchOperator = proto.Field(
        proto.ENUM,
        number=2,
        enum=MatchOperator,
    )


__all__ = tuple(sorted(__protobuf__.manifest))
For example, this could be implemented with an HTTP cookie, which should be @@ -136,12 +137,23 @@ class QuerySuggestion(proto.Message): Attributes: suggestion (str): The suggestion for the query. + completable_field_paths (MutableSequence[str]): + The unique document field paths that serve as + the source of this suggestion if it was + generated from completable fields. + + This field is only populated for the + document-completable model. """ suggestion: str = proto.Field( proto.STRING, number=1, ) + completable_field_paths: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) query_suggestions: MutableSequence[QuerySuggestion] = proto.RepeatedField( proto.MESSAGE, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/conversation.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/conversation.py index 6392498273c5..d498e8b9f3ed 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/conversation.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/conversation.py @@ -41,6 +41,8 @@ class Conversation(proto.Message): name (str): Immutable. Fully qualified name ``project/*/locations/global/collections/{collection}/dataStore/*/conversations/*`` + or + ``project/*/locations/global/collections/{collection}/engines/*/conversations/*``. state (google.cloud.discoveryengine_v1.types.Conversation.State): The state of the Conversation. 
user_pseudo_id (str): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/conversational_search_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/conversational_search_service.py index b4a7634c7d5c..93b785515bc0 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/conversational_search_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/conversational_search_service.py @@ -90,6 +90,26 @@ class ConverseConversationRequest(proto.Message): summary_spec (google.cloud.discoveryengine_v1.types.SearchRequest.ContentSearchSpec.SummarySpec): A specification for configuring the summary returned in the response. + filter (str): + The filter syntax consists of an expression language for + constructing a predicate from one or more fields of the + documents being filtered. Filter expression is + case-sensitive. This will be used to filter search results + which may affect the summary response. + + If this field is unrecognizable, an ``INVALID_ARGUMENT`` is + returned. + + Filtering in Vertex AI Search is done by mapping the LHS + filter key to a key property defined in the Vertex AI Search + backend -- this mapping is defined by the customer in their + schema. For example a media customer might have a field + 'name' in their schema. In this case the filter would look + like this: filter --> name:'ANY("king kong")' + + For more information about filtering including syntax and + filter operators, see + `Filter `__ """ name: str = proto.Field( @@ -126,6 +146,10 @@ class ConverseConversationRequest(proto.Message): message=search_service.SearchRequest.ContentSearchSpec.SummarySpec, ) ) + filter: str = proto.Field( + proto.STRING, + number=9, + ) class ConverseConversationResponse(proto.Message): @@ -194,7 +218,7 @@ class UpdateConversationRequest(proto.Message): [Conversation][google.cloud.discoveryengine.v1.Conversation] to update. 
class DataStore(proto.Message):
    r"""DataStore captures global settings and configs at the
    DataStore level.

    Attributes:
        name (str):
            Immutable. The full resource name of the data store. Format:
            ``projects/{project}/locations/{location}/collections/{collection_id}/dataStores/{data_store_id}``.

            This field must be a UTF-8 encoded string with a length
            limit of 1024 characters.
        display_name (str):
            Required. The data store display name.

            This field must be a UTF-8 encoded string with a length
            limit of 128 characters. Otherwise, an INVALID_ARGUMENT
            error is returned.
        industry_vertical (google.cloud.discoveryengine_v1.types.IndustryVertical):
            Immutable. The industry vertical that the
            data store registers.
        solution_types (MutableSequence[google.cloud.discoveryengine_v1.types.SolutionType]):
            The solutions that the data store enrolls. Available
            solutions for each
            [industry_vertical][google.cloud.discoveryengine.v1.DataStore.industry_vertical]:

            -  ``MEDIA``: ``SOLUTION_TYPE_RECOMMENDATION`` and
               ``SOLUTION_TYPE_SEARCH``.
            -  ``SITE_SEARCH``: ``SOLUTION_TYPE_SEARCH`` is
               automatically enrolled. Other solutions cannot be
               enrolled.
        default_schema_id (str):
            Output only. The id of the default
            [Schema][google.cloud.discoveryengine.v1.Schema] associated
            to this data store.
        content_config (google.cloud.discoveryengine_v1.types.DataStore.ContentConfig):
            Immutable. The content config of the data store. If this
            field is unset, the server behavior defaults to
            [ContentConfig.NO_CONTENT][google.cloud.discoveryengine.v1.DataStore.ContentConfig.NO_CONTENT].
        create_time (google.protobuf.timestamp_pb2.Timestamp):
            Output only. Timestamp the
            [DataStore][google.cloud.discoveryengine.v1.DataStore] was
            created at.
    """

    class ContentConfig(proto.Enum):
        r"""Content config of the data store.

        Values:
            CONTENT_CONFIG_UNSPECIFIED (0):
                Default value.
            NO_CONTENT (1):
                Only contains documents without any
                [Document.content][google.cloud.discoveryengine.v1.Document.content].
            CONTENT_REQUIRED (2):
                Only contains documents with
                [Document.content][google.cloud.discoveryengine.v1.Document.content].
            PUBLIC_WEBSITE (3):
                The data store is used for public website
                search.
        """
        CONTENT_CONFIG_UNSPECIFIED = 0
        NO_CONTENT = 1
        CONTENT_REQUIRED = 2
        PUBLIC_WEBSITE = 3

    name: str = proto.Field(
        proto.STRING,
        number=1,
    )
    display_name: str = proto.Field(
        proto.STRING,
        number=2,
    )
    industry_vertical: common.IndustryVertical = proto.Field(
        proto.ENUM,
        number=3,
        enum=common.IndustryVertical,
    )
    solution_types: MutableSequence[common.SolutionType] = proto.RepeatedField(
        proto.ENUM,
        number=5,
        enum=common.SolutionType,
    )
    default_schema_id: str = proto.Field(
        proto.STRING,
        number=7,
    )
    content_config: ContentConfig = proto.Field(
        proto.ENUM,
        number=6,
        enum=ContentConfig,
    )
    create_time: timestamp_pb2.Timestamp = proto.Field(
        proto.MESSAGE,
        number=4,
        message=timestamp_pb2.Timestamp,
    )


__all__ = tuple(sorted(__protobuf__.manifest))
from __future__ import annotations

from typing import MutableMapping, MutableSequence

from google.protobuf import field_mask_pb2  # type: ignore
from google.protobuf import timestamp_pb2  # type: ignore
import proto  # type: ignore

from google.cloud.discoveryengine_v1.types import data_store as gcd_data_store

__protobuf__ = proto.module(
    package="google.cloud.discoveryengine.v1",
    manifest={
        "CreateDataStoreRequest",
        "GetDataStoreRequest",
        "CreateDataStoreMetadata",
        "ListDataStoresRequest",
        "ListDataStoresResponse",
        "DeleteDataStoreRequest",
        "UpdateDataStoreRequest",
        "DeleteDataStoreMetadata",
    },
)


class CreateDataStoreRequest(proto.Message):
    r"""Request for
    [DataStoreService.CreateDataStore][google.cloud.discoveryengine.v1.DataStoreService.CreateDataStore]
    method.

    Attributes:
        parent (str):
            Required. The parent resource name, such as
            ``projects/{project}/locations/{location}/collections/{collection}``.
        data_store (google.cloud.discoveryengine_v1.types.DataStore):
            Required. The
            [DataStore][google.cloud.discoveryengine.v1.DataStore] to
            create.
        data_store_id (str):
            Required. The ID to use for the
            [DataStore][google.cloud.discoveryengine.v1.DataStore],
            which will become the final component of the
            [DataStore][google.cloud.discoveryengine.v1.DataStore]'s
            resource name.

            This field must conform to
            `RFC-1034 <https://tools.ietf.org/html/rfc1034>`__ standard
            with a length limit of 63 characters. Otherwise, an
            INVALID_ARGUMENT error is returned.
        create_advanced_site_search (bool):
            A boolean flag indicating whether the user wants to directly
            create an advanced data store for site search. If the data
            store is not configured as site search (GENERIC vertical and
            PUBLIC_WEBSITE content_config), this flag will be ignored.
    """

    parent: str = proto.Field(
        proto.STRING,
        number=1,
    )
    data_store: gcd_data_store.DataStore = proto.Field(
        proto.MESSAGE,
        number=2,
        message=gcd_data_store.DataStore,
    )
    data_store_id: str = proto.Field(
        proto.STRING,
        number=3,
    )
    create_advanced_site_search: bool = proto.Field(
        proto.BOOL,
        number=4,
    )


class GetDataStoreRequest(proto.Message):
    r"""Request message for
    [DataStoreService.GetDataStore][google.cloud.discoveryengine.v1.DataStoreService.GetDataStore]
    method.

    Attributes:
        name (str):
            Required. Full resource name of
            [DataStore][google.cloud.discoveryengine.v1.DataStore], such
            as
            ``projects/{project}/locations/{location}/collections/{collection_id}/dataStores/{data_store_id}``.

            If the caller does not have permission to access the
            [DataStore][google.cloud.discoveryengine.v1.DataStore],
            regardless of whether or not it exists, a PERMISSION_DENIED
            error is returned.

            If the requested
            [DataStore][google.cloud.discoveryengine.v1.DataStore] does
            not exist, a NOT_FOUND error is returned.
    """

    name: str = proto.Field(
        proto.STRING,
        number=1,
    )


class CreateDataStoreMetadata(proto.Message):
    r"""Metadata related to the progress of the
    [DataStoreService.CreateDataStore][google.cloud.discoveryengine.v1.DataStoreService.CreateDataStore]
    operation. This will be returned by the
    google.longrunning.Operation.metadata field.

    Attributes:
        create_time (google.protobuf.timestamp_pb2.Timestamp):
            Operation create time.
        update_time (google.protobuf.timestamp_pb2.Timestamp):
            Operation last update time. If the operation
            is done, this is also the finish time.
    """

    create_time: timestamp_pb2.Timestamp = proto.Field(
        proto.MESSAGE,
        number=1,
        message=timestamp_pb2.Timestamp,
    )
    update_time: timestamp_pb2.Timestamp = proto.Field(
        proto.MESSAGE,
        number=2,
        message=timestamp_pb2.Timestamp,
    )


class ListDataStoresRequest(proto.Message):
    r"""Request message for
    [DataStoreService.ListDataStores][google.cloud.discoveryengine.v1.DataStoreService.ListDataStores]
    method.

    Attributes:
        parent (str):
            Required. The parent branch resource name, such as
            ``projects/{project}/locations/{location}/collections/{collection_id}``.

            If the caller does not have permission to list
            [DataStore][google.cloud.discoveryengine.v1.DataStore]s
            under this location, regardless of whether or not this data
            store exists, a PERMISSION_DENIED error is returned.
        page_size (int):
            Maximum number of
            [DataStore][google.cloud.discoveryengine.v1.DataStore]s to
            return. If unspecified, defaults to 10. The maximum allowed
            value is 50. Values above 50 will be coerced to 50.

            If this field is negative, an INVALID_ARGUMENT is returned.
        page_token (str):
            A page token
            [ListDataStoresResponse.next_page_token][google.cloud.discoveryengine.v1.ListDataStoresResponse.next_page_token],
            received from a previous
            [DataStoreService.ListDataStores][google.cloud.discoveryengine.v1.DataStoreService.ListDataStores]
            call. Provide this to retrieve the subsequent page.

            When paginating, all other parameters provided to
            [DataStoreService.ListDataStores][google.cloud.discoveryengine.v1.DataStoreService.ListDataStores]
            must match the call that provided the page token. Otherwise,
            an INVALID_ARGUMENT error is returned.
        filter (str):
            Filter by solution type. For example: filter =
            'solution_type:SOLUTION_TYPE_SEARCH'
    """

    parent: str = proto.Field(
        proto.STRING,
        number=1,
    )
    page_size: int = proto.Field(
        proto.INT32,
        number=2,
    )
    page_token: str = proto.Field(
        proto.STRING,
        number=3,
    )
    filter: str = proto.Field(
        proto.STRING,
        number=4,
    )


class ListDataStoresResponse(proto.Message):
    r"""Response message for
    [DataStoreService.ListDataStores][google.cloud.discoveryengine.v1.DataStoreService.ListDataStores]
    method.

    Attributes:
        data_stores (MutableSequence[google.cloud.discoveryengine_v1.types.DataStore]):
            All the customer's
            [DataStore][google.cloud.discoveryengine.v1.DataStore]s.
        next_page_token (str):
            A token that can be sent as
            [ListDataStoresRequest.page_token][google.cloud.discoveryengine.v1.ListDataStoresRequest.page_token]
            to retrieve the next page. If this field is omitted, there
            are no subsequent pages.
    """

    @property
    def raw_page(self):
        return self

    data_stores: MutableSequence[gcd_data_store.DataStore] = proto.RepeatedField(
        proto.MESSAGE,
        number=1,
        message=gcd_data_store.DataStore,
    )
    next_page_token: str = proto.Field(
        proto.STRING,
        number=2,
    )


class DeleteDataStoreRequest(proto.Message):
    r"""Request message for
    [DataStoreService.DeleteDataStore][google.cloud.discoveryengine.v1.DataStoreService.DeleteDataStore]
    method.

    Attributes:
        name (str):
            Required. Full resource name of
            [DataStore][google.cloud.discoveryengine.v1.DataStore], such
            as
            ``projects/{project}/locations/{location}/collections/{collection_id}/dataStores/{data_store_id}``.

            If the caller does not have permission to delete the
            [DataStore][google.cloud.discoveryengine.v1.DataStore],
            regardless of whether or not it exists, a PERMISSION_DENIED
            error is returned.

            If the
            [DataStore][google.cloud.discoveryengine.v1.DataStore] to
            delete does not exist, a NOT_FOUND error is returned.
    """

    name: str = proto.Field(
        proto.STRING,
        number=1,
    )


class UpdateDataStoreRequest(proto.Message):
    r"""Request message for
    [DataStoreService.UpdateDataStore][google.cloud.discoveryengine.v1.DataStoreService.UpdateDataStore]
    method.

    Attributes:
        data_store (google.cloud.discoveryengine_v1.types.DataStore):
            Required. The
            [DataStore][google.cloud.discoveryengine.v1.DataStore] to
            update.

            If the caller does not have permission to update the
            [DataStore][google.cloud.discoveryengine.v1.DataStore],
            regardless of whether or not it exists, a PERMISSION_DENIED
            error is returned.

            If the
            [DataStore][google.cloud.discoveryengine.v1.DataStore] to
            update does not exist, a NOT_FOUND error is returned.
        update_mask (google.protobuf.field_mask_pb2.FieldMask):
            Indicates which fields in the provided
            [DataStore][google.cloud.discoveryengine.v1.DataStore] to
            update.

            If an unsupported or unknown field is provided, an
            INVALID_ARGUMENT error is returned.
    """

    data_store: gcd_data_store.DataStore = proto.Field(
        proto.MESSAGE,
        number=1,
        message=gcd_data_store.DataStore,
    )
    update_mask: field_mask_pb2.FieldMask = proto.Field(
        proto.MESSAGE,
        number=2,
        message=field_mask_pb2.FieldMask,
    )


class DeleteDataStoreMetadata(proto.Message):
    r"""Metadata related to the progress of the
    [DataStoreService.DeleteDataStore][google.cloud.discoveryengine.v1.DataStoreService.DeleteDataStore]
    operation. This will be returned by the
    google.longrunning.Operation.metadata field.

    Attributes:
        create_time (google.protobuf.timestamp_pb2.Timestamp):
            Operation create time.
        update_time (google.protobuf.timestamp_pb2.Timestamp):
            Operation last update time. If the operation
            is done, this is also the finish time.
    """

    create_time: timestamp_pb2.Timestamp = proto.Field(
        proto.MESSAGE,
        number=1,
        message=timestamp_pb2.Timestamp,
    )
    update_time: timestamp_pb2.Timestamp = proto.Field(
        proto.MESSAGE,
        number=2,
        message=timestamp_pb2.Timestamp,
    )


__all__ = tuple(sorted(__protobuf__.manifest))
from __future__ import annotations

from typing import MutableMapping, MutableSequence

from google.protobuf import timestamp_pb2  # type: ignore
import proto  # type: ignore

from google.cloud.discoveryengine_v1.types import common

__protobuf__ = proto.module(
    package="google.cloud.discoveryengine.v1",
    manifest={
        "Engine",
    },
)


class Engine(proto.Message):
    r"""Metadata that describes the training and serving parameters of an
    [Engine][google.cloud.discoveryengine.v1.Engine].

    This message has `oneof`_ fields (mutually exclusive fields).
    For each oneof, at most one member field can be set at the same time.
    Setting any member of the oneof automatically clears all other
    members.

    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields

    Attributes:
        chat_engine_config (google.cloud.discoveryengine_v1.types.Engine.ChatEngineConfig):
            Configurations for the Chat Engine. Only applicable if
            [solution_type][google.cloud.discoveryengine.v1.Engine.solution_type]
            is
            [SOLUTION_TYPE_CHAT][google.cloud.discoveryengine.v1.SolutionType.SOLUTION_TYPE_CHAT].

            This field is a member of `oneof`_ ``engine_config``.
        search_engine_config (google.cloud.discoveryengine_v1.types.Engine.SearchEngineConfig):
            Configurations for the Search Engine. Only applicable if
            [solution_type][google.cloud.discoveryengine.v1.Engine.solution_type]
            is
            [SOLUTION_TYPE_SEARCH][google.cloud.discoveryengine.v1.SolutionType.SOLUTION_TYPE_SEARCH].

            This field is a member of `oneof`_ ``engine_config``.
        chat_engine_metadata (google.cloud.discoveryengine_v1.types.Engine.ChatEngineMetadata):
            Output only. Additional information of the Chat Engine. Only
            applicable if
            [solution_type][google.cloud.discoveryengine.v1.Engine.solution_type]
            is
            [SOLUTION_TYPE_CHAT][google.cloud.discoveryengine.v1.SolutionType.SOLUTION_TYPE_CHAT].

            This field is a member of `oneof`_ ``engine_metadata``.
        name (str):
            Immutable. The fully qualified resource name of the engine.

            This field must be a UTF-8 encoded string with a length
            limit of 1024 characters.

            Format:
            ``projects/{project_number}/locations/{location}/collections/{collection}/engines/{engine}``
            engine should be 1-63 characters, and valid characters are
            /[a-z0-9][a-z0-9-_]*/. Otherwise, an INVALID_ARGUMENT error
            is returned.
        display_name (str):
            Required. The display name of the engine.
            Should be human readable. UTF-8 encoded string
            with limit of 1024 characters.
        create_time (google.protobuf.timestamp_pb2.Timestamp):
            Output only. Timestamp the Recommendation
            Engine was created at.
        update_time (google.protobuf.timestamp_pb2.Timestamp):
            Output only. Timestamp the Recommendation
            Engine was last updated.
        data_store_ids (MutableSequence[str]):
            The data stores associated with this engine.

            For
            [SOLUTION_TYPE_SEARCH][google.cloud.discoveryengine.v1.SolutionType.SOLUTION_TYPE_SEARCH]
            and
            [SOLUTION_TYPE_RECOMMENDATION][google.cloud.discoveryengine.v1.SolutionType.SOLUTION_TYPE_RECOMMENDATION]
            type of engines, they can only associate with at most one
            data store.

            If
            [solution_type][google.cloud.discoveryengine.v1.Engine.solution_type]
            is
            [SOLUTION_TYPE_CHAT][google.cloud.discoveryengine.v1.SolutionType.SOLUTION_TYPE_CHAT],
            multiple
            [DataStore][google.cloud.discoveryengine.v1.DataStore]s in
            the same
            [Collection][google.cloud.discoveryengine.v1.Collection] can
            be associated here.

            Note that when used in
            [CreateEngineRequest][google.cloud.discoveryengine.v1.CreateEngineRequest],
            one DataStore id must be provided as the system will use it
            for necessary initializations.
        solution_type (google.cloud.discoveryengine_v1.types.SolutionType):
            Required. The solutions of the engine.
        industry_vertical (google.cloud.discoveryengine_v1.types.IndustryVertical):
            The industry vertical that the engine registers. The
            restriction of the Engine industry vertical is based on
            [DataStore][google.cloud.discoveryengine.v1.DataStore]: If
            unspecified, default to ``GENERIC``. Vertical on Engine has
            to match vertical of the DataStore linked to the engine.
        common_config (google.cloud.discoveryengine_v1.types.Engine.CommonConfig):
            Common config spec that specifies the
            metadata of the engine.
    """

    class SearchEngineConfig(proto.Message):
        r"""Configurations for a Search Engine.

        Attributes:
            search_tier (google.cloud.discoveryengine_v1.types.SearchTier):
                The search feature tier of this engine.

                Different tiers might have different pricing. To learn more,
                please check the pricing documentation.

                Defaults to
                [SearchTier.SEARCH_TIER_STANDARD][google.cloud.discoveryengine.v1.SearchTier.SEARCH_TIER_STANDARD]
                if not specified.
            search_add_ons (MutableSequence[google.cloud.discoveryengine_v1.types.SearchAddOn]):
                The add-on that this search engine enables.
        """

        search_tier: common.SearchTier = proto.Field(
            proto.ENUM,
            number=1,
            enum=common.SearchTier,
        )
        search_add_ons: MutableSequence[common.SearchAddOn] = proto.RepeatedField(
            proto.ENUM,
            number=2,
            enum=common.SearchAddOn,
        )

    class ChatEngineConfig(proto.Message):
        r"""Configurations for a Chat Engine.

        Attributes:
            agent_creation_config (google.cloud.discoveryengine_v1.types.Engine.ChatEngineConfig.AgentCreationConfig):
                The configuration to generate the Dialogflow agent that is
                associated to this Engine.

                Note that these configurations are one-time consumed by and
                passed to Dialogflow service. It means they cannot be
                retrieved using
                [EngineService.GetEngine][google.cloud.discoveryengine.v1.EngineService.GetEngine]
                or
                [EngineService.ListEngines][google.cloud.discoveryengine.v1.EngineService.ListEngines]
                API after engine creation.
            dialogflow_agent_to_link (str):
                The resource name of an existing Dialogflow agent to link to
                this Chat Engine. Customers can either provide
                ``agent_creation_config`` to create agent or provide an
                agent name that links the agent with the Chat engine.

                Format:
                ``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>``.

                Note that the ``dialogflow_agent_to_link`` are one-time
                consumed by and passed to Dialogflow service. It means they
                cannot be retrieved using
                [EngineService.GetEngine][google.cloud.discoveryengine.v1.EngineService.GetEngine]
                or
                [EngineService.ListEngines][google.cloud.discoveryengine.v1.EngineService.ListEngines]
                API after engine creation. Please use
                [ChatEngineMetadata.dialogflow_agent][google.cloud.discoveryengine.v1.Engine.ChatEngineMetadata.dialogflow_agent]
                for actual agent association after Engine is created.
        """

        class AgentCreationConfig(proto.Message):
            r"""Configurations for generating a Dialogflow agent.

            Note that these configurations are one-time consumed by and passed
            to Dialogflow service. It means they cannot be retrieved using
            [EngineService.GetEngine][google.cloud.discoveryengine.v1.EngineService.GetEngine]
            or
            [EngineService.ListEngines][google.cloud.discoveryengine.v1.EngineService.ListEngines]
            API after engine creation.

            Attributes:
                business (str):
                    Name of the company, organization or other
                    entity that the agent represents. Used for
                    knowledge connector LLM prompt and for knowledge
                    search.
                default_language_code (str):
                    Required. The default language of the agent as a language
                    tag. See `Language
                    Support <https://cloud.google.com/dialogflow/docs/reference/language>`__
                    for a list of the currently supported language codes.
                time_zone (str):
                    Required. The time zone of the agent from the `time zone
                    database <https://www.iana.org/time-zones>`__, e.g.,
                    America/New_York, Europe/Paris.
                location (str):
                    Agent location for Agent creation, supported
                    values: global/us/eu. If not provided, us Engine
                    will create Agent using us-central-1 by default;
                    eu Engine will create Agent using eu-west-1 by
                    default.
            """

            business: str = proto.Field(
                proto.STRING,
                number=1,
            )
            default_language_code: str = proto.Field(
                proto.STRING,
                number=2,
            )
            time_zone: str = proto.Field(
                proto.STRING,
                number=3,
            )
            location: str = proto.Field(
                proto.STRING,
                number=4,
            )

        agent_creation_config: "Engine.ChatEngineConfig.AgentCreationConfig" = (
            proto.Field(
                proto.MESSAGE,
                number=1,
                message="Engine.ChatEngineConfig.AgentCreationConfig",
            )
        )
        dialogflow_agent_to_link: str = proto.Field(
            proto.STRING,
            number=2,
        )

    class CommonConfig(proto.Message):
        r"""Common configurations for an Engine.

        Attributes:
            company_name (str):
                Immutable. The name of the company, business
                or entity that is associated with the engine.
                Setting this may help improve LLM related
                features.
        """

        company_name: str = proto.Field(
            proto.STRING,
            number=1,
        )

    class ChatEngineMetadata(proto.Message):
        r"""Additional information of a Chat Engine.
        Fields in this message are output only.

        Attributes:
            dialogflow_agent (str):
                The resource name of a Dialogflow agent, that this Chat
                Engine refers to.

                Format:
                ``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>``.
        """

        dialogflow_agent: str = proto.Field(
            proto.STRING,
            number=1,
        )

    chat_engine_config: ChatEngineConfig = proto.Field(
        proto.MESSAGE,
        number=11,
        oneof="engine_config",
        message=ChatEngineConfig,
    )
    search_engine_config: SearchEngineConfig = proto.Field(
        proto.MESSAGE,
        number=13,
        oneof="engine_config",
        message=SearchEngineConfig,
    )
    chat_engine_metadata: ChatEngineMetadata = proto.Field(
        proto.MESSAGE,
        number=12,
        oneof="engine_metadata",
        message=ChatEngineMetadata,
    )
    name: str = proto.Field(
        proto.STRING,
        number=1,
    )
    display_name: str = proto.Field(
        proto.STRING,
        number=2,
    )
    create_time: timestamp_pb2.Timestamp = proto.Field(
        proto.MESSAGE,
        number=3,
        message=timestamp_pb2.Timestamp,
    )
    update_time: timestamp_pb2.Timestamp = proto.Field(
        proto.MESSAGE,
        number=4,
        message=timestamp_pb2.Timestamp,
    )
    data_store_ids: MutableSequence[str] = proto.RepeatedField(
        proto.STRING,
        number=5,
    )
    solution_type: common.SolutionType = proto.Field(
        proto.ENUM,
        number=6,
        enum=common.SolutionType,
    )
    industry_vertical: common.IndustryVertical = proto.Field(
        proto.ENUM,
        number=16,
        enum=common.IndustryVertical,
    )
    common_config: CommonConfig = proto.Field(
        proto.MESSAGE,
        number=15,
        message=CommonConfig,
    )


__all__ = tuple(sorted(__protobuf__.manifest))
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.discoveryengine_v1.types import engine as gcd_engine + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1", + manifest={ + "CreateEngineRequest", + "CreateEngineMetadata", + "DeleteEngineRequest", + "DeleteEngineMetadata", + "GetEngineRequest", + "ListEnginesRequest", + "ListEnginesResponse", + "UpdateEngineRequest", + }, +) + + +class CreateEngineRequest(proto.Message): + r"""Request for + [EngineService.CreateEngine][google.cloud.discoveryengine.v1.EngineService.CreateEngine] + method. + + Attributes: + parent (str): + Required. The parent resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}``. + engine (google.cloud.discoveryengine_v1.types.Engine): + Required. The + [Engine][google.cloud.discoveryengine.v1.Engine] to create. + engine_id (str): + Required. The ID to use for the + [Engine][google.cloud.discoveryengine.v1.Engine], which will + become the final component of the + [Engine][google.cloud.discoveryengine.v1.Engine]'s resource + name. + + This field must conform to + `RFC-1034 `__ standard + with a length limit of 63 characters. Otherwise, an + INVALID_ARGUMENT error is returned. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + engine: gcd_engine.Engine = proto.Field( + proto.MESSAGE, + number=2, + message=gcd_engine.Engine, + ) + engine_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class CreateEngineMetadata(proto.Message): + r"""Metadata related to the progress of the + [EngineService.CreateEngine][google.cloud.discoveryengine.v1.EngineService.CreateEngine] + operation. This will be returned by the + google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class DeleteEngineRequest(proto.Message): + r"""Request message for + [EngineService.DeleteEngine][google.cloud.discoveryengine.v1.EngineService.DeleteEngine] + method. + + Attributes: + name (str): + Required. Full resource name of + [Engine][google.cloud.discoveryengine.v1.Engine], such as + ``projects/{project}/locations/{location}/collections/{collection_id}/engines/{engine_id}``. + + If the caller does not have permission to delete the + [Engine][google.cloud.discoveryengine.v1.Engine], regardless + of whether or not it exists, a PERMISSION_DENIED error is + returned. + + If the [Engine][google.cloud.discoveryengine.v1.Engine] to + delete does not exist, a NOT_FOUND error is returned. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteEngineMetadata(proto.Message): + r"""Metadata related to the progress of the + [EngineService.DeleteEngine][google.cloud.discoveryengine.v1.EngineService.DeleteEngine] + operation. 
This will be returned by the + google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class GetEngineRequest(proto.Message): + r"""Request message for + [EngineService.GetEngine][google.cloud.discoveryengine.v1.EngineService.GetEngine] + method. + + Attributes: + name (str): + Required. Full resource name of + [Engine][google.cloud.discoveryengine.v1.Engine], such as + ``projects/{project}/locations/{location}/collections/{collection_id}/engines/{engine_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListEnginesRequest(proto.Message): + r"""Request message for + [EngineService.ListEngines][google.cloud.discoveryengine.v1.EngineService.ListEngines] + method. + + Attributes: + parent (str): + Required. The parent resource name, such as + ``projects/{project}/locations/{location}/collections/{collection_id}``. + page_size (int): + Optional. Not supported. + page_token (str): + Optional. Not supported. + filter (str): + Optional. Filter by solution type. For example: + solution_type=SOLUTION_TYPE_SEARCH + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListEnginesResponse(proto.Message): + r"""Response message for + [EngineService.ListEngines][google.cloud.discoveryengine.v1.EngineService.ListEngines] + method. 
+ + Attributes: + engines (MutableSequence[google.cloud.discoveryengine_v1.types.Engine]): + All the customer's + [Engine][google.cloud.discoveryengine.v1.Engine]s. + next_page_token (str): + Not supported. + """ + + @property + def raw_page(self): + return self + + engines: MutableSequence[gcd_engine.Engine] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gcd_engine.Engine, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class UpdateEngineRequest(proto.Message): + r"""Request message for + [EngineService.UpdateEngine][google.cloud.discoveryengine.v1.EngineService.UpdateEngine] + method. + + Attributes: + engine (google.cloud.discoveryengine_v1.types.Engine): + Required. The + [Engine][google.cloud.discoveryengine.v1.Engine] to update. + + If the caller does not have permission to update the + [Engine][google.cloud.discoveryengine.v1.Engine], regardless + of whether or not it exists, a PERMISSION_DENIED error is + returned. + + If the [Engine][google.cloud.discoveryengine.v1.Engine] to + update does not exist, a NOT_FOUND error is returned. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Indicates which fields in the provided + [Engine][google.cloud.discoveryengine.v1.Engine] to update. + + If an unsupported or unknown field is provided, an + INVALID_ARGUMENT error is returned. 
+ """ + + engine: gcd_engine.Engine = proto.Field( + proto.MESSAGE, + number=1, + message=gcd_engine.Engine, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/import_config.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/import_config.py index 1c257bbae26c..f43dc2183543 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/import_config.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/import_config.py @@ -22,7 +22,7 @@ from google.type import date_pb2 # type: ignore import proto # type: ignore -from google.cloud.discoveryengine_v1.types import document, user_event +from google.cloud.discoveryengine_v1.types import completion, document, user_event __protobuf__ = proto.module( package="google.cloud.discoveryengine.v1", @@ -36,6 +36,9 @@ "ImportDocumentsMetadata", "ImportDocumentsRequest", "ImportDocumentsResponse", + "ImportSuggestionDenyListEntriesRequest", + "ImportSuggestionDenyListEntriesResponse", + "ImportSuggestionDenyListEntriesMetadata", }, ) @@ -600,4 +603,131 @@ class ImportDocumentsResponse(proto.Message): ) +class ImportSuggestionDenyListEntriesRequest(proto.Message): + r"""Request message for + [CompletionService.ImportSuggestionDenyListEntries][google.cloud.discoveryengine.v1.CompletionService.ImportSuggestionDenyListEntries] + method. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + inline_source (google.cloud.discoveryengine_v1.types.ImportSuggestionDenyListEntriesRequest.InlineSource): + The Inline source for the input content for + suggestion deny list entries. + + This field is a member of `oneof`_ ``source``. + gcs_source (google.cloud.discoveryengine_v1.types.GcsSource): + Cloud Storage location for the input content. + + Only 1 file can be specified that contains all entries to + import. Supported values ``gcs_source.schema`` for + autocomplete suggestion deny list entry imports: + + - ``suggestion_deny_list`` (default): One JSON + [SuggestionDenyListEntry] per line. + + This field is a member of `oneof`_ ``source``. + parent (str): + Required. The parent data store resource name for which to + import denylist entries. Follows pattern + projects/\ */locations/*/collections/*/dataStores/*. + """ + + class InlineSource(proto.Message): + r"""The inline source for SuggestionDenyListEntry. + + Attributes: + entries (MutableSequence[google.cloud.discoveryengine_v1.types.SuggestionDenyListEntry]): + Required. A list of all denylist entries to + import. Max of 1000 items. + """ + + entries: MutableSequence[ + completion.SuggestionDenyListEntry + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=completion.SuggestionDenyListEntry, + ) + + inline_source: InlineSource = proto.Field( + proto.MESSAGE, + number=2, + oneof="source", + message=InlineSource, + ) + gcs_source: "GcsSource" = proto.Field( + proto.MESSAGE, + number=3, + oneof="source", + message="GcsSource", + ) + parent: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ImportSuggestionDenyListEntriesResponse(proto.Message): + r"""Response message for + [CompletionService.ImportSuggestionDenyListEntries][google.cloud.discoveryengine.v1.CompletionService.ImportSuggestionDenyListEntries] + method. 
+ + Attributes: + error_samples (MutableSequence[google.rpc.status_pb2.Status]): + A sample of errors encountered while + processing the request. + imported_entries_count (int): + Count of deny list entries successfully + imported. + failed_entries_count (int): + Count of deny list entries that failed to be + imported. + """ + + error_samples: MutableSequence[status_pb2.Status] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=status_pb2.Status, + ) + imported_entries_count: int = proto.Field( + proto.INT64, + number=2, + ) + failed_entries_count: int = proto.Field( + proto.INT64, + number=3, + ) + + +class ImportSuggestionDenyListEntriesMetadata(proto.Message): + r"""Metadata related to the progress of the + ImportSuggestionDenyListEntries operation. This is returned by + the google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. 
+ """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/purge_config.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/purge_config.py index f0b9f6115c6f..1d53c62a2cb5 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/purge_config.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/purge_config.py @@ -18,6 +18,7 @@ from typing import MutableMapping, MutableSequence from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore import proto # type: ignore __protobuf__ = proto.module( @@ -26,6 +27,9 @@ "PurgeDocumentsRequest", "PurgeDocumentsResponse", "PurgeDocumentsMetadata", + "PurgeSuggestionDenyListEntriesRequest", + "PurgeSuggestionDenyListEntriesResponse", + "PurgeSuggestionDenyListEntriesMetadata", }, ) @@ -128,4 +132,72 @@ class PurgeDocumentsMetadata(proto.Message): ) +class PurgeSuggestionDenyListEntriesRequest(proto.Message): + r"""Request message for + [CompletionService.PurgeSuggestionDenyListEntries][google.cloud.discoveryengine.v1.CompletionService.PurgeSuggestionDenyListEntries] + method. + + Attributes: + parent (str): + Required. The parent data store resource name for which to + import denylist entries. Follows pattern + projects/\ */locations/*/collections/*/dataStores/*. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + + +class PurgeSuggestionDenyListEntriesResponse(proto.Message): + r"""Response message for + [CompletionService.PurgeSuggestionDenyListEntries][google.cloud.discoveryengine.v1.CompletionService.PurgeSuggestionDenyListEntries] + method. 
+ + Attributes: + purge_count (int): + Number of suggestion deny list entries + purged. + error_samples (MutableSequence[google.rpc.status_pb2.Status]): + A sample of errors encountered while + processing the request. + """ + + purge_count: int = proto.Field( + proto.INT64, + number=1, + ) + error_samples: MutableSequence[status_pb2.Status] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=status_pb2.Status, + ) + + +class PurgeSuggestionDenyListEntriesMetadata(proto.Message): + r"""Metadata related to the progress of the + PurgeSuggestionDenyListEntries operation. This is returned by + the google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/search_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/search_service.py index 7e7cf61a8014..ecdc083ef80a 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/search_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/search_service.py @@ -41,6 +41,8 @@ class SearchRequest(proto.Message): serving_config (str): Required. 
The resource name of the Search serving config, such as + ``projects/*/locations/global/collections/default_collection/engines/*/servingConfigs/default_serving_config``, + or ``projects/*/locations/global/collections/default_collection/dataStores/default_data_store/servingConfigs/default_serving_config``. This field is used to identify the serving configuration name, set of models used to make the search. @@ -91,12 +93,39 @@ class SearchRequest(proto.Message): If this field is unrecognizable, an ``INVALID_ARGUMENT`` is returned. + + Filtering in Vertex AI Search is done by mapping the LHS + filter key to a key property defined in the Vertex AI Search + backend -- this mapping is defined by the customer in their + schema. For example a media customer might have a field + 'name' in their schema. In this case the filter would look + like this: filter --> name:'ANY("king kong")' + + For more information about filtering including syntax and + filter operators, see + `Filter `__ + canonical_filter (str): + The default filter that is applied when a user performs a + search without checking any filters on the search page. + + The filter applied to every search request when quality + improvement such as query expansion is needed. In the case a + query does not have a sufficient amount of results this + filter will be used to determine whether or not to enable + the query expansion flow. The original filter will still be + used for the query expanded search. This field is strongly + recommended to achieve high search quality. + + For more information about filter syntax, see + [SearchRequest.filter][google.cloud.discoveryengine.v1.SearchRequest.filter]. order_by (str): The order in which documents are returned. Documents can be ordered by a field in an [Document][google.cloud.discoveryengine.v1.Document] object. Leave it unset if ordered by relevance. ``order_by`` - expression is case-sensitive. + expression is case-sensitive. 
For more information on + ordering, see + `Ordering `__ If this field is unrecognizable, an ``INVALID_ARGUMENT`` is returned. @@ -112,8 +141,9 @@ class SearchRequest(proto.Message): A maximum of 100 values are allowed. Otherwise, an ``INVALID_ARGUMENT`` error is returned. boost_spec (google.cloud.discoveryengine_v1.types.SearchRequest.BoostSpec): - Boost specification to boost certain - documents. + Boost specification to boost certain documents. For more + information on boosting, see + `Boosting `__ params (MutableMapping[str, google.protobuf.struct_pb2.Value]): Additional search parameters. @@ -121,11 +151,15 @@ class SearchRequest(proto.Message): - ``user_country_code``: string. Default empty. If set to non-empty, results are restricted or boosted based on the - location provided. + location provided. Example: user_country_code: "au" + + For available codes see `Country + Codes `__ + - ``search_type``: double. Default empty. Enables non-webpage searching depending on the value. The only valid non-default value is 1, which enables image - searching. + searching. Example: search_type: 1 query_expansion_spec (google.cloud.discoveryengine_v1.types.SearchRequest.QueryExpansionSpec): The query expansion specification that specifies the conditions under which query @@ -650,11 +684,55 @@ class SummarySpec(proto.Message): navigational queries. If this field is set to ``true``, we skip generating summaries for non-summary seeking queries and return fallback messages instead. + model_prompt_spec (google.cloud.discoveryengine_v1.types.SearchRequest.ContentSearchSpec.SummarySpec.ModelPromptSpec): + If specified, the spec will be used to modify + the prompt provided to the LLM. language_code (str): Language code for Summary. Use language tags defined by - [BCP47][https://www.rfc-editor.org/rfc/bcp/bcp47.txt]. + `BCP47 `__. + Note: This is an experimental feature. 
+ model_spec (google.cloud.discoveryengine_v1.types.SearchRequest.ContentSearchSpec.SummarySpec.ModelSpec): + If specified, the spec will be used to modify + the model specification provided to the LLM. """ + class ModelPromptSpec(proto.Message): + r"""Specification of the prompt to use with the model. + + Attributes: + preamble (str): + Text at the beginning of the prompt that + instructs the assistant. Examples are available + in the user guide. + """ + + preamble: str = proto.Field( + proto.STRING, + number=1, + ) + + class ModelSpec(proto.Message): + r"""Specification of the model. + + Attributes: + version (str): + The model version used to generate the summary. + + Supported values are: + + - ``stable``: string. Default value when no value is + specified. Uses a generally available, fine-tuned version + of the text-bison@001 model. + - ``preview``: string. (Public preview) Uses a fine-tuned + version of the text-bison@002 model. This model works + only for summaries in English. + """ + + version: str = proto.Field( + proto.STRING, + number=1, + ) + summary_result_count: int = proto.Field( proto.INT32, number=1, @@ -671,10 +749,22 @@ class SummarySpec(proto.Message): proto.BOOL, number=4, ) + model_prompt_spec: "SearchRequest.ContentSearchSpec.SummarySpec.ModelPromptSpec" = proto.Field( + proto.MESSAGE, + number=5, + message="SearchRequest.ContentSearchSpec.SummarySpec.ModelPromptSpec", + ) language_code: str = proto.Field( proto.STRING, number=6, ) + model_spec: "SearchRequest.ContentSearchSpec.SummarySpec.ModelSpec" = ( + proto.Field( + proto.MESSAGE, + number=7, + message="SearchRequest.ContentSearchSpec.SummarySpec.ModelSpec", + ) + ) snippet_spec: "SearchRequest.ContentSearchSpec.SnippetSpec" = proto.Field( proto.MESSAGE, @@ -720,6 +810,10 @@ class SummarySpec(proto.Message): proto.STRING, number=7, ) + canonical_filter: str = proto.Field( + proto.STRING, + number=29, + ) order_by: str = proto.Field( proto.STRING, number=8, @@ -935,6 +1029,8 @@ class 
Summary(proto.Message): safety_attributes (google.cloud.discoveryengine_v1.types.SearchResponse.Summary.SafetyAttributes): A collection of Safety Attribute categories and their associated confidence scores. + summary_with_metadata (google.cloud.discoveryengine_v1.types.SearchResponse.Summary.SummaryWithMetadata): + Summary with metadata information. """ class SummarySkippedReason(proto.Enum): @@ -1007,6 +1103,125 @@ class SafetyAttributes(proto.Message): number=2, ) + class CitationMetadata(proto.Message): + r"""Citation metadata. + + Attributes: + citations (MutableSequence[google.cloud.discoveryengine_v1.types.SearchResponse.Summary.Citation]): + Citations for segments. + """ + + citations: MutableSequence[ + "SearchResponse.Summary.Citation" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="SearchResponse.Summary.Citation", + ) + + class Citation(proto.Message): + r"""Citation info for a segment. + + Attributes: + start_index (int): + Index indicates the start of the segment, + measured in bytes/unicode. + end_index (int): + End of the attributed segment, exclusive. + sources (MutableSequence[google.cloud.discoveryengine_v1.types.SearchResponse.Summary.CitationSource]): + Citation sources for the attributed segment. + """ + + start_index: int = proto.Field( + proto.INT64, + number=1, + ) + end_index: int = proto.Field( + proto.INT64, + number=2, + ) + sources: MutableSequence[ + "SearchResponse.Summary.CitationSource" + ] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="SearchResponse.Summary.CitationSource", + ) + + class CitationSource(proto.Message): + r"""Citation source. + + Attributes: + reference_index (int): + Document reference index from + SummaryWithMetadata.references. It is 0-indexed and the + value will be zero if the reference_index is not set + explicitly. + """ + + reference_index: int = proto.Field( + proto.INT64, + number=4, + ) + + class Reference(proto.Message): + r"""Document reference. 
+ + Attributes: + title (str): + Title of the document. + document (str): + Required. + [Document.name][google.cloud.discoveryengine.v1.Document.name] + of the document. Full resource name of the referenced + document, in the format + ``projects/*/locations/*/collections/*/dataStores/*/branches/*/documents/*``. + uri (str): + Cloud Storage or HTTP uri for the document. + """ + + title: str = proto.Field( + proto.STRING, + number=1, + ) + document: str = proto.Field( + proto.STRING, + number=2, + ) + uri: str = proto.Field( + proto.STRING, + number=3, + ) + + class SummaryWithMetadata(proto.Message): + r"""Summary with metadata information. + + Attributes: + summary (str): + Summary text with no citation information. + citation_metadata (google.cloud.discoveryengine_v1.types.SearchResponse.Summary.CitationMetadata): + Citation metadata for given summary. + references (MutableSequence[google.cloud.discoveryengine_v1.types.SearchResponse.Summary.Reference]): + Document References. + """ + + summary: str = proto.Field( + proto.STRING, + number=1, + ) + citation_metadata: "SearchResponse.Summary.CitationMetadata" = proto.Field( + proto.MESSAGE, + number=2, + message="SearchResponse.Summary.CitationMetadata", + ) + references: MutableSequence[ + "SearchResponse.Summary.Reference" + ] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="SearchResponse.Summary.Reference", + ) + summary_text: str = proto.Field( proto.STRING, number=1, @@ -1023,6 +1238,13 @@ class SafetyAttributes(proto.Message): number=3, message="SearchResponse.Summary.SafetyAttributes", ) + summary_with_metadata: "SearchResponse.Summary.SummaryWithMetadata" = ( + proto.Field( + proto.MESSAGE, + number=4, + message="SearchResponse.Summary.SummaryWithMetadata", + ) + ) class QueryExpansionInfo(proto.Message): r"""Information describing query expansion including whether diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/site_search_engine.py 
b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/site_search_engine.py new file mode 100644 index 000000000000..741036fc6d5c --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/site_search_engine.py @@ -0,0 +1,257 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1", + manifest={ + "SiteSearchEngine", + "TargetSite", + "SiteVerificationInfo", + }, +) + + +class SiteSearchEngine(proto.Message): + r"""SiteSearchEngine captures DataStore level site search + persisting configurations. It is a singleton value per data + store. + + Attributes: + name (str): + The fully qualified resource name of the site search engine. + Format: + ``projects/*/locations/*/dataStores/*/siteSearchEngine`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class TargetSite(proto.Message): + r"""A target site for the SiteSearchEngine. + + Attributes: + name (str): + Output only. The fully qualified resource name of the target + site. 
+ ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine/targetSites/{target_site}`` + The ``target_site_id`` is system-generated. + provided_uri_pattern (str): + Required. Input only. The user provided URI pattern from + which the ``generated_uri_pattern`` is generated. + type_ (google.cloud.discoveryengine_v1.types.TargetSite.Type): + The type of the target site, e.g., whether + the site is to be included or excluded. + exact_match (bool): + Input only. If set to false, a uri_pattern is generated to + include all pages whose address contains the + provided_uri_pattern. If set to true, an uri_pattern is + generated to try to be an exact match of the + provided_uri_pattern or just the specific page if the + provided_uri_pattern is a specific one. provided_uri_pattern + is always normalized to generate the URI pattern to be used + by the search engine. + generated_uri_pattern (str): + Output only. This is system-generated based on the + provided_uri_pattern. + site_verification_info (google.cloud.discoveryengine_v1.types.SiteVerificationInfo): + Output only. Site ownership and validity + verification status. + indexing_status (google.cloud.discoveryengine_v1.types.TargetSite.IndexingStatus): + Output only. Indexing status. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The target site's last updated + time. + failure_reason (google.cloud.discoveryengine_v1.types.TargetSite.FailureReason): + Output only. Failure reason. + """ + + class Type(proto.Enum): + r"""Possible target site types. + + Values: + TYPE_UNSPECIFIED (0): + This value is unused. In this case, server behavior defaults + to + [Type.INCLUDE][google.cloud.discoveryengine.v1.TargetSite.Type.INCLUDE]. + INCLUDE (1): + Include the target site. + EXCLUDE (2): + Exclude the target site. + """ + TYPE_UNSPECIFIED = 0 + INCLUDE = 1 + EXCLUDE = 2 + + class IndexingStatus(proto.Enum): + r"""Target site indexing status enumeration. 
+ + Values: + INDEXING_STATUS_UNSPECIFIED (0): + Defaults to SUCCEEDED. + PENDING (1): + The target site is in the update queue and + will be picked up by indexing pipeline. + FAILED (2): + The target site fails to be indexed. + SUCCEEDED (3): + The target site has been indexed. + DELETING (4): + The previously indexed target site has been + marked to be deleted. This is a transitioning + state which will resulted in either: + + 1. target site deleted if unindexing is + successful; + 2. state reverts to SUCCEEDED if the unindexing + fails. + """ + INDEXING_STATUS_UNSPECIFIED = 0 + PENDING = 1 + FAILED = 2 + SUCCEEDED = 3 + DELETING = 4 + + class FailureReason(proto.Message): + r"""Site search indexing failure reasons. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + quota_failure (google.cloud.discoveryengine_v1.types.TargetSite.FailureReason.QuotaFailure): + Failed due to insufficient quota. + + This field is a member of `oneof`_ ``failure``. + """ + + class QuotaFailure(proto.Message): + r"""Failed due to insufficient quota. + + Attributes: + total_required_quota (int): + This number is an estimation on how much + total quota this project needs to successfully + complete indexing. 
+ """ + + total_required_quota: int = proto.Field( + proto.INT64, + number=1, + ) + + quota_failure: "TargetSite.FailureReason.QuotaFailure" = proto.Field( + proto.MESSAGE, + number=1, + oneof="failure", + message="TargetSite.FailureReason.QuotaFailure", + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + provided_uri_pattern: str = proto.Field( + proto.STRING, + number=2, + ) + type_: Type = proto.Field( + proto.ENUM, + number=3, + enum=Type, + ) + exact_match: bool = proto.Field( + proto.BOOL, + number=6, + ) + generated_uri_pattern: str = proto.Field( + proto.STRING, + number=4, + ) + site_verification_info: "SiteVerificationInfo" = proto.Field( + proto.MESSAGE, + number=7, + message="SiteVerificationInfo", + ) + indexing_status: IndexingStatus = proto.Field( + proto.ENUM, + number=8, + enum=IndexingStatus, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + failure_reason: FailureReason = proto.Field( + proto.MESSAGE, + number=9, + message=FailureReason, + ) + + +class SiteVerificationInfo(proto.Message): + r"""Verification information for target sites in advanced site + search. + + Attributes: + site_verification_state (google.cloud.discoveryengine_v1.types.SiteVerificationInfo.SiteVerificationState): + Site verification state indicating the + ownership and validity. + verify_time (google.protobuf.timestamp_pb2.Timestamp): + Latest site verification time. + """ + + class SiteVerificationState(proto.Enum): + r"""Site verification state. + + Values: + SITE_VERIFICATION_STATE_UNSPECIFIED (0): + Defaults to VERIFIED. + VERIFIED (1): + Site ownership verified. + UNVERIFIED (2): + Site ownership pending verification or + verification failed. + EXEMPTED (3): + Site exempt from verification, e.g., a public + website that opens to all. 
+ """ + SITE_VERIFICATION_STATE_UNSPECIFIED = 0 + VERIFIED = 1 + UNVERIFIED = 2 + EXEMPTED = 3 + + site_verification_state: SiteVerificationState = proto.Field( + proto.ENUM, + number=1, + enum=SiteVerificationState, + ) + verify_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/site_search_engine_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/site_search_engine_service.py new file mode 100644 index 000000000000..18c05ebc17f8 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/site_search_engine_service.py @@ -0,0 +1,851 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.discoveryengine_v1.types import ( + site_search_engine as gcd_site_search_engine, +) + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1", + manifest={ + "GetSiteSearchEngineRequest", + "CreateTargetSiteRequest", + "CreateTargetSiteMetadata", + "BatchCreateTargetSitesRequest", + "GetTargetSiteRequest", + "UpdateTargetSiteRequest", + "UpdateTargetSiteMetadata", + "DeleteTargetSiteRequest", + "DeleteTargetSiteMetadata", + "ListTargetSitesRequest", + "ListTargetSitesResponse", + "BatchCreateTargetSiteMetadata", + "BatchCreateTargetSitesResponse", + "EnableAdvancedSiteSearchRequest", + "EnableAdvancedSiteSearchResponse", + "EnableAdvancedSiteSearchMetadata", + "DisableAdvancedSiteSearchRequest", + "DisableAdvancedSiteSearchResponse", + "DisableAdvancedSiteSearchMetadata", + "RecrawlUrisRequest", + "RecrawlUrisResponse", + "RecrawlUrisMetadata", + "BatchVerifyTargetSitesRequest", + "BatchVerifyTargetSitesResponse", + "BatchVerifyTargetSitesMetadata", + "FetchDomainVerificationStatusRequest", + "FetchDomainVerificationStatusResponse", + }, +) + + +class GetSiteSearchEngineRequest(proto.Message): + r"""Request message for + [SiteSearchEngineService.GetSiteSearchEngine][google.cloud.discoveryengine.v1.SiteSearchEngineService.GetSiteSearchEngine] + method. + + Attributes: + name (str): + Required. Resource name of + [SiteSearchEngine][google.cloud.discoveryengine.v1.SiteSearchEngine], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine``. + + If the caller does not have permission to access the + [SiteSearchEngine], regardless of whether or not it exists, + a PERMISSION_DENIED error is returned. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateTargetSiteRequest(proto.Message): + r"""Request message for + [SiteSearchEngineService.CreateTargetSite][google.cloud.discoveryengine.v1.SiteSearchEngineService.CreateTargetSite] + method. + + Attributes: + parent (str): + Required. Parent resource name of + [TargetSite][google.cloud.discoveryengine.v1.TargetSite], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine``. + target_site (google.cloud.discoveryengine_v1.types.TargetSite): + Required. The + [TargetSite][google.cloud.discoveryengine.v1.TargetSite] to + create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + target_site: gcd_site_search_engine.TargetSite = proto.Field( + proto.MESSAGE, + number=2, + message=gcd_site_search_engine.TargetSite, + ) + + +class CreateTargetSiteMetadata(proto.Message): + r"""Metadata related to the progress of the + [SiteSearchEngineService.CreateTargetSite][google.cloud.discoveryengine.v1.SiteSearchEngineService.CreateTargetSite] + operation. This will be returned by the + google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class BatchCreateTargetSitesRequest(proto.Message): + r"""Request message for + [SiteSearchEngineService.BatchCreateTargetSites][google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchCreateTargetSites] + method. + + Attributes: + parent (str): + Required. 
The parent resource shared by all TargetSites + being created. + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine``. + The parent field in the CreateBookRequest messages must + either be empty or match this field. + requests (MutableSequence[google.cloud.discoveryengine_v1.types.CreateTargetSiteRequest]): + Required. The request message specifying the + resources to create. A maximum of 20 TargetSites + can be created in a batch. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + requests: MutableSequence["CreateTargetSiteRequest"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="CreateTargetSiteRequest", + ) + + +class GetTargetSiteRequest(proto.Message): + r"""Request message for + [SiteSearchEngineService.GetTargetSite][google.cloud.discoveryengine.v1.SiteSearchEngineService.GetTargetSite] + method. + + Attributes: + name (str): + Required. Full resource name of + [TargetSite][google.cloud.discoveryengine.v1.TargetSite], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine/targetSites/{target_site}``. + + If the caller does not have permission to access the + [TargetSite][google.cloud.discoveryengine.v1.TargetSite], + regardless of whether or not it exists, a PERMISSION_DENIED + error is returned. + + If the requested + [TargetSite][google.cloud.discoveryengine.v1.TargetSite] + does not exist, a NOT_FOUND error is returned. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateTargetSiteRequest(proto.Message): + r"""Request message for + [SiteSearchEngineService.UpdateTargetSite][google.cloud.discoveryengine.v1.SiteSearchEngineService.UpdateTargetSite] + method. + + Attributes: + target_site (google.cloud.discoveryengine_v1.types.TargetSite): + Required. The target site to update. 
If the caller does not + have permission to update the + [TargetSite][google.cloud.discoveryengine.v1.TargetSite], + regardless of whether or not it exists, a PERMISSION_DENIED + error is returned. + + If the + [TargetSite][google.cloud.discoveryengine.v1.TargetSite] to + update does not exist, a NOT_FOUND error is returned. + """ + + target_site: gcd_site_search_engine.TargetSite = proto.Field( + proto.MESSAGE, + number=1, + message=gcd_site_search_engine.TargetSite, + ) + + +class UpdateTargetSiteMetadata(proto.Message): + r"""Metadata related to the progress of the + [SiteSearchEngineService.UpdateTargetSite][google.cloud.discoveryengine.v1.SiteSearchEngineService.UpdateTargetSite] + operation. This will be returned by the + google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class DeleteTargetSiteRequest(proto.Message): + r"""Request message for + [SiteSearchEngineService.DeleteTargetSite][google.cloud.discoveryengine.v1.SiteSearchEngineService.DeleteTargetSite] + method. + + Attributes: + name (str): + Required. Full resource name of + [TargetSite][google.cloud.discoveryengine.v1.TargetSite], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine/targetSites/{target_site}``. + + If the caller does not have permission to access the + [TargetSite][google.cloud.discoveryengine.v1.TargetSite], + regardless of whether or not it exists, a PERMISSION_DENIED + error is returned. 
+ + If the requested + [TargetSite][google.cloud.discoveryengine.v1.TargetSite] + does not exist, a NOT_FOUND error is returned. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteTargetSiteMetadata(proto.Message): + r"""Metadata related to the progress of the + [SiteSearchEngineService.DeleteTargetSite][google.cloud.discoveryengine.v1.SiteSearchEngineService.DeleteTargetSite] + operation. This will be returned by the + google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class ListTargetSitesRequest(proto.Message): + r"""Request message for + [SiteSearchEngineService.ListTargetSites][google.cloud.discoveryengine.v1.SiteSearchEngineService.ListTargetSites] + method. + + Attributes: + parent (str): + Required. The parent site search engine resource name, such + as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine``. + + If the caller does not have permission to list + [TargetSite][google.cloud.discoveryengine.v1.TargetSite]s + under this site search engine, regardless of whether or not + this branch exists, a PERMISSION_DENIED error is returned. + page_size (int): + Requested page size. Server may return fewer items than + requested. If unspecified, server will pick an appropriate + default. The maximum value is 1000; values above 1000 will + be coerced to 1000. + + If this field is negative, an INVALID_ARGUMENT error is + returned. 
+ page_token (str): + A page token, received from a previous ``ListTargetSites`` + call. Provide this to retrieve the subsequent page. + + When paginating, all other parameters provided to + ``ListTargetSites`` must match the call that provided the + page token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListTargetSitesResponse(proto.Message): + r"""Response message for + [SiteSearchEngineService.ListTargetSites][google.cloud.discoveryengine.v1.SiteSearchEngineService.ListTargetSites] + method. + + Attributes: + target_sites (MutableSequence[google.cloud.discoveryengine_v1.types.TargetSite]): + List of TargetSites. + next_page_token (str): + A token that can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + total_size (int): + The total number of items matching the + request. This will always be populated in the + response. + """ + + @property + def raw_page(self): + return self + + target_sites: MutableSequence[ + gcd_site_search_engine.TargetSite + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gcd_site_search_engine.TargetSite, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + total_size: int = proto.Field( + proto.INT32, + number=3, + ) + + +class BatchCreateTargetSiteMetadata(proto.Message): + r"""Metadata related to the progress of the + [SiteSearchEngineService.BatchCreateTargetSites][google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchCreateTargetSites] + operation. This will be returned by the + google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. 
+ """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class BatchCreateTargetSitesResponse(proto.Message): + r"""Response message for + [SiteSearchEngineService.BatchCreateTargetSites][google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchCreateTargetSites] + method. + + Attributes: + target_sites (MutableSequence[google.cloud.discoveryengine_v1.types.TargetSite]): + TargetSites created. + """ + + target_sites: MutableSequence[ + gcd_site_search_engine.TargetSite + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gcd_site_search_engine.TargetSite, + ) + + +class EnableAdvancedSiteSearchRequest(proto.Message): + r"""Request message for + [SiteSearchEngineService.EnableAdvancedSiteSearch][google.cloud.discoveryengine.v1.SiteSearchEngineService.EnableAdvancedSiteSearch] + method. + + Attributes: + site_search_engine (str): + Required. Full resource name of the + [SiteSearchEngine][google.cloud.discoveryengine.v1.SiteSearchEngine], + such as + ``projects/{project}/locations/{location}/dataStores/{data_store_id}/siteSearchEngine``. + """ + + site_search_engine: str = proto.Field( + proto.STRING, + number=1, + ) + + +class EnableAdvancedSiteSearchResponse(proto.Message): + r"""Response message for + [SiteSearchEngineService.EnableAdvancedSiteSearch][google.cloud.discoveryengine.v1.SiteSearchEngineService.EnableAdvancedSiteSearch] + method. + + """ + + +class EnableAdvancedSiteSearchMetadata(proto.Message): + r"""Metadata related to the progress of the + [SiteSearchEngineService.EnableAdvancedSiteSearch][google.cloud.discoveryengine.v1.SiteSearchEngineService.EnableAdvancedSiteSearch] + operation. This will be returned by the + google.longrunning.Operation.metadata field. 
+ + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class DisableAdvancedSiteSearchRequest(proto.Message): + r"""Request message for + [SiteSearchEngineService.DisableAdvancedSiteSearch][google.cloud.discoveryengine.v1.SiteSearchEngineService.DisableAdvancedSiteSearch] + method. + + Attributes: + site_search_engine (str): + Required. Full resource name of the + [SiteSearchEngine][google.cloud.discoveryengine.v1.SiteSearchEngine], + such as + ``projects/{project}/locations/{location}/dataStores/{data_store_id}/siteSearchEngine``. + """ + + site_search_engine: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DisableAdvancedSiteSearchResponse(proto.Message): + r"""Response message for + [SiteSearchEngineService.DisableAdvancedSiteSearch][google.cloud.discoveryengine.v1.SiteSearchEngineService.DisableAdvancedSiteSearch] + method. + + """ + + +class DisableAdvancedSiteSearchMetadata(proto.Message): + r"""Metadata related to the progress of the + [SiteSearchEngineService.DisableAdvancedSiteSearch][google.cloud.discoveryengine.v1.SiteSearchEngineService.DisableAdvancedSiteSearch] + operation. This will be returned by the + google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. 
+ """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class RecrawlUrisRequest(proto.Message): + r"""Request message for + [SiteSearchEngineService.RecrawlUris][google.cloud.discoveryengine.v1.SiteSearchEngineService.RecrawlUris] + method. + + Attributes: + site_search_engine (str): + Required. Full resource name of the + [SiteSearchEngine][google.cloud.discoveryengine.v1.SiteSearchEngine], + such as + ``projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine``. + uris (MutableSequence[str]): + Required. List of URIs to crawl. At most 10K URIs are + supported, otherwise an INVALID_ARGUMENT error is thrown. + Each URI should match at least one + [TargetSite][google.cloud.discoveryengine.v1.TargetSite] in + ``site_search_engine``. + """ + + site_search_engine: str = proto.Field( + proto.STRING, + number=1, + ) + uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +class RecrawlUrisResponse(proto.Message): + r"""Response message for + [SiteSearchEngineService.RecrawlUris][google.cloud.discoveryengine.v1.SiteSearchEngineService.RecrawlUris] + method. + + Attributes: + failure_samples (MutableSequence[google.cloud.discoveryengine_v1.types.RecrawlUrisResponse.FailureInfo]): + Details for a sample of up to 10 ``failed_uris``. + failed_uris (MutableSequence[str]): + URIs that were not crawled before the LRO + terminated. + """ + + class FailureInfo(proto.Message): + r"""Details about why a particular URI failed to be crawled. Each + FailureInfo contains one FailureReason per CorpusType. + + Attributes: + uri (str): + URI that failed to be crawled. + failure_reasons (MutableSequence[google.cloud.discoveryengine_v1.types.RecrawlUrisResponse.FailureInfo.FailureReason]): + List of failure reasons by corpus type (e.g. 
+ desktop, mobile). + """ + + class FailureReason(proto.Message): + r"""Details about why crawling failed for a particular + CorpusType, e.g., DESKTOP and MOBILE crawling may fail for + different reasons. + + Attributes: + corpus_type (google.cloud.discoveryengine_v1.types.RecrawlUrisResponse.FailureInfo.FailureReason.CorpusType): + DESKTOP, MOBILE, or CORPUS_TYPE_UNSPECIFIED. + error_message (str): + Reason why the URI was not crawled. + """ + + class CorpusType(proto.Enum): + r"""CorpusType for the failed crawling operation. + + Values: + CORPUS_TYPE_UNSPECIFIED (0): + Default value. + DESKTOP (1): + Denotes a crawling attempt for the desktop + version of a page. + MOBILE (2): + Denotes a crawling attempt for the mobile + version of a page. + """ + CORPUS_TYPE_UNSPECIFIED = 0 + DESKTOP = 1 + MOBILE = 2 + + corpus_type: "RecrawlUrisResponse.FailureInfo.FailureReason.CorpusType" = ( + proto.Field( + proto.ENUM, + number=1, + enum="RecrawlUrisResponse.FailureInfo.FailureReason.CorpusType", + ) + ) + error_message: str = proto.Field( + proto.STRING, + number=2, + ) + + uri: str = proto.Field( + proto.STRING, + number=1, + ) + failure_reasons: MutableSequence[ + "RecrawlUrisResponse.FailureInfo.FailureReason" + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="RecrawlUrisResponse.FailureInfo.FailureReason", + ) + + failure_samples: MutableSequence[FailureInfo] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=FailureInfo, + ) + failed_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +class RecrawlUrisMetadata(proto.Message): + r"""Metadata related to the progress of the + [SiteSearchEngineService.RecrawlUris][google.cloud.discoveryengine.v1.SiteSearchEngineService.RecrawlUris] + operation. This will be returned by the + google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. 
+ update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. + invalid_uris (MutableSequence[str]): + Unique URIs in the request that don't match + any TargetSite in the DataStore, only match + TargetSites that haven't been fully indexed, or + match a TargetSite with type EXCLUDE. + valid_uris_count (int): + Total number of unique URIs in the request that are not in + invalid_uris. + success_count (int): + Total number of URIs that have been crawled + so far. + pending_count (int): + Total number of URIs that have yet to be + crawled. + quota_exceeded_count (int): + Total number of URIs that were rejected due + to insufficient indexing resources. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + invalid_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + valid_uris_count: int = proto.Field( + proto.INT32, + number=4, + ) + success_count: int = proto.Field( + proto.INT32, + number=5, + ) + pending_count: int = proto.Field( + proto.INT32, + number=6, + ) + quota_exceeded_count: int = proto.Field( + proto.INT32, + number=7, + ) + + +class BatchVerifyTargetSitesRequest(proto.Message): + r"""Request message for + [SiteSearchEngineService.BatchVerifyTargetSites][google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchVerifyTargetSites] + method. + + Attributes: + parent (str): + Required. The parent resource shared by all TargetSites + being verified. + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine``. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + + +class BatchVerifyTargetSitesResponse(proto.Message): + r"""Response message for + [SiteSearchEngineService.BatchVerifyTargetSites][google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchVerifyTargetSites] + method. + + """ + + +class BatchVerifyTargetSitesMetadata(proto.Message): + r"""Metadata related to the progress of the + [SiteSearchEngineService.BatchVerifyTargetSites][google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchVerifyTargetSites] + operation. This will be returned by the + google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class FetchDomainVerificationStatusRequest(proto.Message): + r"""Request message for + [SiteSearchEngineService.FetchDomainVerificationStatus][google.cloud.discoveryengine.v1.SiteSearchEngineService.FetchDomainVerificationStatus] + method. + + Attributes: + site_search_engine (str): + Required. The site search engine resource under which we + fetch all the domain verification status. + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine``. + page_size (int): + Requested page size. Server may return fewer items than + requested. If unspecified, server will pick an appropriate + default. The maximum value is 1000; values above 1000 will + be coerced to 1000. + + If this field is negative, an INVALID_ARGUMENT error is + returned. 
+ page_token (str): + A page token, received from a previous + ``FetchDomainVerificationStatus`` call. Provide this to + retrieve the subsequent page. + + When paginating, all other parameters provided to + ``FetchDomainVerificationStatus`` must match the call that + provided the page token. + """ + + site_search_engine: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class FetchDomainVerificationStatusResponse(proto.Message): + r"""Response message for + [SiteSearchEngineService.FetchDomainVerificationStatus][google.cloud.discoveryengine.v1.SiteSearchEngineService.FetchDomainVerificationStatus] + method. + + Attributes: + target_sites (MutableSequence[google.cloud.discoveryengine_v1.types.TargetSite]): + List of TargetSites containing the site + verification status. + next_page_token (str): + A token that can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + total_size (int): + The total number of items matching the + request. This will always be populated in the + response. 
+ """ + + @property + def raw_page(self): + return self + + target_sites: MutableSequence[ + gcd_site_search_engine.TargetSite + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gcd_site_search_engine.TargetSite, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + total_size: int = proto.Field( + proto.INT32, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/user_event.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/user_event.py index c3cc9df763e5..d804b124786a 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/user_event.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/user_event.py @@ -161,10 +161,9 @@ class UserEvent(proto.Message): conforming to https://google.aip.dev/160#filtering. Similarly, for ``view-item-list`` events that are generated - from a [RecommendationService.RecommendRequest][], this - field may be populated directly from - [RecommendationService.RecommendRequest.filter][] conforming - to https://google.aip.dev/160#filtering. + from a [RecommendRequest][], this field may be populated + directly from [RecommendRequest.filter][] conforming to + https://google.aip.dev/160#filtering. The value must be a UTF-8 encoded string with a length limit of 1,000 characters. 
Otherwise, an ``INVALID_ARGUMENT`` diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_version.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_version.py index fb9a6cb2d900..360a0d13ebdd 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_version.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.11.7" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/__init__.py index 81b5304a5998..aa0460a0dd21 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/__init__.py @@ -26,18 +26,42 @@ ConversationalSearchServiceAsyncClient, ConversationalSearchServiceClient, ) +from .services.data_store_service import ( + DataStoreServiceAsyncClient, + DataStoreServiceClient, +) from .services.document_service import DocumentServiceAsyncClient, DocumentServiceClient +from .services.engine_service import EngineServiceAsyncClient, EngineServiceClient from .services.recommendation_service import ( RecommendationServiceAsyncClient, RecommendationServiceClient, ) from .services.schema_service import SchemaServiceAsyncClient, SchemaServiceClient from .services.search_service import SearchServiceAsyncClient, SearchServiceClient +from .services.serving_config_service import ( + ServingConfigServiceAsyncClient, + ServingConfigServiceClient, +) +from .services.site_search_engine_service import ( + SiteSearchEngineServiceAsyncClient, + SiteSearchEngineServiceClient, 
+) from .services.user_event_service import ( UserEventServiceAsyncClient, UserEventServiceClient, ) -from .types.common import CustomAttribute, DoubleList, Interval, UserInfo +from .types.common import ( + CustomAttribute, + DoubleList, + EmbeddingConfig, + IndustryVertical, + Interval, + SearchAddOn, + SearchTier, + SolutionType, + UserInfo, +) +from .types.completion import SuggestionDenyListEntry from .types.completion_service import CompleteQueryRequest, CompleteQueryResponse from .types.conversation import ( Conversation, @@ -56,6 +80,17 @@ ListConversationsResponse, UpdateConversationRequest, ) +from .types.data_store import DataStore +from .types.data_store_service import ( + CreateDataStoreMetadata, + CreateDataStoreRequest, + DeleteDataStoreMetadata, + DeleteDataStoreRequest, + GetDataStoreRequest, + ListDataStoresRequest, + ListDataStoresResponse, + UpdateDataStoreRequest, +) from .types.document import Document from .types.document_service import ( CreateDocumentRequest, @@ -65,6 +100,17 @@ ListDocumentsResponse, UpdateDocumentRequest, ) +from .types.engine import Engine +from .types.engine_service import ( + CreateEngineMetadata, + CreateEngineRequest, + DeleteEngineMetadata, + DeleteEngineRequest, + GetEngineRequest, + ListEnginesRequest, + ListEnginesResponse, + UpdateEngineRequest, +) from .types.import_config import ( BigQuerySource, GcsSource, @@ -72,6 +118,9 @@ ImportDocumentsRequest, ImportDocumentsResponse, ImportErrorConfig, + ImportSuggestionDenyListEntriesMetadata, + ImportSuggestionDenyListEntriesRequest, + ImportSuggestionDenyListEntriesResponse, ImportUserEventsMetadata, ImportUserEventsRequest, ImportUserEventsResponse, @@ -80,6 +129,9 @@ PurgeDocumentsMetadata, PurgeDocumentsRequest, PurgeDocumentsResponse, + PurgeSuggestionDenyListEntriesMetadata, + PurgeSuggestionDenyListEntriesRequest, + PurgeSuggestionDenyListEntriesResponse, ) from .types.recommendation_service import RecommendRequest, RecommendResponse from .types.schema import 
Schema @@ -95,6 +147,43 @@ UpdateSchemaRequest, ) from .types.search_service import SearchRequest, SearchResponse +from .types.serving_config import ServingConfig +from .types.serving_config_service import ( + GetServingConfigRequest, + ListServingConfigsRequest, + ListServingConfigsResponse, + UpdateServingConfigRequest, +) +from .types.site_search_engine import SiteSearchEngine, SiteVerificationInfo, TargetSite +from .types.site_search_engine_service import ( + BatchCreateTargetSiteMetadata, + BatchCreateTargetSitesRequest, + BatchCreateTargetSitesResponse, + BatchVerifyTargetSitesMetadata, + BatchVerifyTargetSitesRequest, + BatchVerifyTargetSitesResponse, + CreateTargetSiteMetadata, + CreateTargetSiteRequest, + DeleteTargetSiteMetadata, + DeleteTargetSiteRequest, + DisableAdvancedSiteSearchMetadata, + DisableAdvancedSiteSearchRequest, + DisableAdvancedSiteSearchResponse, + EnableAdvancedSiteSearchMetadata, + EnableAdvancedSiteSearchRequest, + EnableAdvancedSiteSearchResponse, + FetchDomainVerificationStatusRequest, + FetchDomainVerificationStatusResponse, + GetSiteSearchEngineRequest, + GetTargetSiteRequest, + ListTargetSitesRequest, + ListTargetSitesResponse, + RecrawlUrisMetadata, + RecrawlUrisRequest, + RecrawlUrisResponse, + UpdateTargetSiteMetadata, + UpdateTargetSiteRequest, +) from .types.user_event import ( CompletionInfo, DocumentInfo, @@ -110,11 +199,21 @@ __all__ = ( "CompletionServiceAsyncClient", "ConversationalSearchServiceAsyncClient", + "DataStoreServiceAsyncClient", "DocumentServiceAsyncClient", + "EngineServiceAsyncClient", "RecommendationServiceAsyncClient", "SchemaServiceAsyncClient", "SearchServiceAsyncClient", + "ServingConfigServiceAsyncClient", + "SiteSearchEngineServiceAsyncClient", "UserEventServiceAsyncClient", + "BatchCreateTargetSiteMetadata", + "BatchCreateTargetSitesRequest", + "BatchCreateTargetSitesResponse", + "BatchVerifyTargetSitesMetadata", + "BatchVerifyTargetSitesRequest", + "BatchVerifyTargetSitesResponse", 
"BigQuerySource", "CollectUserEventRequest", "CompleteQueryRequest", @@ -128,58 +227,121 @@ "ConverseConversationRequest", "ConverseConversationResponse", "CreateConversationRequest", + "CreateDataStoreMetadata", + "CreateDataStoreRequest", "CreateDocumentRequest", + "CreateEngineMetadata", + "CreateEngineRequest", "CreateSchemaMetadata", "CreateSchemaRequest", + "CreateTargetSiteMetadata", + "CreateTargetSiteRequest", "CustomAttribute", + "DataStore", + "DataStoreServiceClient", "DeleteConversationRequest", + "DeleteDataStoreMetadata", + "DeleteDataStoreRequest", "DeleteDocumentRequest", + "DeleteEngineMetadata", + "DeleteEngineRequest", "DeleteSchemaMetadata", "DeleteSchemaRequest", + "DeleteTargetSiteMetadata", + "DeleteTargetSiteRequest", + "DisableAdvancedSiteSearchMetadata", + "DisableAdvancedSiteSearchRequest", + "DisableAdvancedSiteSearchResponse", "Document", "DocumentInfo", "DocumentServiceClient", "DoubleList", + "EmbeddingConfig", + "EnableAdvancedSiteSearchMetadata", + "EnableAdvancedSiteSearchRequest", + "EnableAdvancedSiteSearchResponse", + "Engine", + "EngineServiceClient", + "FetchDomainVerificationStatusRequest", + "FetchDomainVerificationStatusResponse", "GcsSource", "GetConversationRequest", + "GetDataStoreRequest", "GetDocumentRequest", + "GetEngineRequest", "GetSchemaRequest", + "GetServingConfigRequest", + "GetSiteSearchEngineRequest", + "GetTargetSiteRequest", "ImportDocumentsMetadata", "ImportDocumentsRequest", "ImportDocumentsResponse", "ImportErrorConfig", + "ImportSuggestionDenyListEntriesMetadata", + "ImportSuggestionDenyListEntriesRequest", + "ImportSuggestionDenyListEntriesResponse", "ImportUserEventsMetadata", "ImportUserEventsRequest", "ImportUserEventsResponse", + "IndustryVertical", "Interval", "ListConversationsRequest", "ListConversationsResponse", + "ListDataStoresRequest", + "ListDataStoresResponse", "ListDocumentsRequest", "ListDocumentsResponse", + "ListEnginesRequest", + "ListEnginesResponse", "ListSchemasRequest", 
"ListSchemasResponse", + "ListServingConfigsRequest", + "ListServingConfigsResponse", + "ListTargetSitesRequest", + "ListTargetSitesResponse", "MediaInfo", "PageInfo", "PanelInfo", "PurgeDocumentsMetadata", "PurgeDocumentsRequest", "PurgeDocumentsResponse", + "PurgeSuggestionDenyListEntriesMetadata", + "PurgeSuggestionDenyListEntriesRequest", + "PurgeSuggestionDenyListEntriesResponse", "RecommendRequest", "RecommendResponse", "RecommendationServiceClient", + "RecrawlUrisMetadata", + "RecrawlUrisRequest", + "RecrawlUrisResponse", "Reply", "Schema", "SchemaServiceClient", + "SearchAddOn", "SearchInfo", "SearchRequest", "SearchResponse", "SearchServiceClient", + "SearchTier", + "ServingConfig", + "ServingConfigServiceClient", + "SiteSearchEngine", + "SiteSearchEngineServiceClient", + "SiteVerificationInfo", + "SolutionType", + "SuggestionDenyListEntry", + "TargetSite", "TextInput", "TransactionInfo", "UpdateConversationRequest", + "UpdateDataStoreRequest", "UpdateDocumentRequest", + "UpdateEngineRequest", "UpdateSchemaMetadata", "UpdateSchemaRequest", + "UpdateServingConfigRequest", + "UpdateTargetSiteMetadata", + "UpdateTargetSiteRequest", "UserEvent", "UserEventServiceClient", "UserInfo", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_metadata.json b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_metadata.json index 88141f3bd79e..d6f8fff3a337 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_metadata.json +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_metadata.json @@ -14,6 +14,16 @@ "methods": [ "complete_query" ] + }, + "ImportSuggestionDenyListEntries": { + "methods": [ + "import_suggestion_deny_list_entries" + ] + }, + "PurgeSuggestionDenyListEntries": { + "methods": [ + "purge_suggestion_deny_list_entries" + ] } } }, @@ -24,6 +34,16 @@ "methods": [ "complete_query" ] + }, + 
"ImportSuggestionDenyListEntries": { + "methods": [ + "import_suggestion_deny_list_entries" + ] + }, + "PurgeSuggestionDenyListEntries": { + "methods": [ + "purge_suggestion_deny_list_entries" + ] } } }, @@ -34,6 +54,16 @@ "methods": [ "complete_query" ] + }, + "ImportSuggestionDenyListEntries": { + "methods": [ + "import_suggestion_deny_list_entries" + ] + }, + "PurgeSuggestionDenyListEntries": { + "methods": [ + "purge_suggestion_deny_list_entries" + ] } } } @@ -148,6 +178,100 @@ } } }, + "DataStoreService": { + "clients": { + "grpc": { + "libraryClient": "DataStoreServiceClient", + "rpcs": { + "CreateDataStore": { + "methods": [ + "create_data_store" + ] + }, + "DeleteDataStore": { + "methods": [ + "delete_data_store" + ] + }, + "GetDataStore": { + "methods": [ + "get_data_store" + ] + }, + "ListDataStores": { + "methods": [ + "list_data_stores" + ] + }, + "UpdateDataStore": { + "methods": [ + "update_data_store" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DataStoreServiceAsyncClient", + "rpcs": { + "CreateDataStore": { + "methods": [ + "create_data_store" + ] + }, + "DeleteDataStore": { + "methods": [ + "delete_data_store" + ] + }, + "GetDataStore": { + "methods": [ + "get_data_store" + ] + }, + "ListDataStores": { + "methods": [ + "list_data_stores" + ] + }, + "UpdateDataStore": { + "methods": [ + "update_data_store" + ] + } + } + }, + "rest": { + "libraryClient": "DataStoreServiceClient", + "rpcs": { + "CreateDataStore": { + "methods": [ + "create_data_store" + ] + }, + "DeleteDataStore": { + "methods": [ + "delete_data_store" + ] + }, + "GetDataStore": { + "methods": [ + "get_data_store" + ] + }, + "ListDataStores": { + "methods": [ + "list_data_stores" + ] + }, + "UpdateDataStore": { + "methods": [ + "update_data_store" + ] + } + } + } + } + }, "DocumentService": { "clients": { "grpc": { @@ -272,6 +396,100 @@ } } }, + "EngineService": { + "clients": { + "grpc": { + "libraryClient": "EngineServiceClient", + "rpcs": { + "CreateEngine": { + 
"methods": [ + "create_engine" + ] + }, + "DeleteEngine": { + "methods": [ + "delete_engine" + ] + }, + "GetEngine": { + "methods": [ + "get_engine" + ] + }, + "ListEngines": { + "methods": [ + "list_engines" + ] + }, + "UpdateEngine": { + "methods": [ + "update_engine" + ] + } + } + }, + "grpc-async": { + "libraryClient": "EngineServiceAsyncClient", + "rpcs": { + "CreateEngine": { + "methods": [ + "create_engine" + ] + }, + "DeleteEngine": { + "methods": [ + "delete_engine" + ] + }, + "GetEngine": { + "methods": [ + "get_engine" + ] + }, + "ListEngines": { + "methods": [ + "list_engines" + ] + }, + "UpdateEngine": { + "methods": [ + "update_engine" + ] + } + } + }, + "rest": { + "libraryClient": "EngineServiceClient", + "rpcs": { + "CreateEngine": { + "methods": [ + "create_engine" + ] + }, + "DeleteEngine": { + "methods": [ + "delete_engine" + ] + }, + "GetEngine": { + "methods": [ + "get_engine" + ] + }, + "ListEngines": { + "methods": [ + "list_engines" + ] + }, + "UpdateEngine": { + "methods": [ + "update_engine" + ] + } + } + } + } + }, "RecommendationService": { "clients": { "grpc": { @@ -434,6 +652,269 @@ } } }, + "ServingConfigService": { + "clients": { + "grpc": { + "libraryClient": "ServingConfigServiceClient", + "rpcs": { + "GetServingConfig": { + "methods": [ + "get_serving_config" + ] + }, + "ListServingConfigs": { + "methods": [ + "list_serving_configs" + ] + }, + "UpdateServingConfig": { + "methods": [ + "update_serving_config" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ServingConfigServiceAsyncClient", + "rpcs": { + "GetServingConfig": { + "methods": [ + "get_serving_config" + ] + }, + "ListServingConfigs": { + "methods": [ + "list_serving_configs" + ] + }, + "UpdateServingConfig": { + "methods": [ + "update_serving_config" + ] + } + } + }, + "rest": { + "libraryClient": "ServingConfigServiceClient", + "rpcs": { + "GetServingConfig": { + "methods": [ + "get_serving_config" + ] + }, + "ListServingConfigs": { + "methods": [ + 
"list_serving_configs" + ] + }, + "UpdateServingConfig": { + "methods": [ + "update_serving_config" + ] + } + } + } + } + }, + "SiteSearchEngineService": { + "clients": { + "grpc": { + "libraryClient": "SiteSearchEngineServiceClient", + "rpcs": { + "BatchCreateTargetSites": { + "methods": [ + "batch_create_target_sites" + ] + }, + "BatchVerifyTargetSites": { + "methods": [ + "batch_verify_target_sites" + ] + }, + "CreateTargetSite": { + "methods": [ + "create_target_site" + ] + }, + "DeleteTargetSite": { + "methods": [ + "delete_target_site" + ] + }, + "DisableAdvancedSiteSearch": { + "methods": [ + "disable_advanced_site_search" + ] + }, + "EnableAdvancedSiteSearch": { + "methods": [ + "enable_advanced_site_search" + ] + }, + "FetchDomainVerificationStatus": { + "methods": [ + "fetch_domain_verification_status" + ] + }, + "GetSiteSearchEngine": { + "methods": [ + "get_site_search_engine" + ] + }, + "GetTargetSite": { + "methods": [ + "get_target_site" + ] + }, + "ListTargetSites": { + "methods": [ + "list_target_sites" + ] + }, + "RecrawlUris": { + "methods": [ + "recrawl_uris" + ] + }, + "UpdateTargetSite": { + "methods": [ + "update_target_site" + ] + } + } + }, + "grpc-async": { + "libraryClient": "SiteSearchEngineServiceAsyncClient", + "rpcs": { + "BatchCreateTargetSites": { + "methods": [ + "batch_create_target_sites" + ] + }, + "BatchVerifyTargetSites": { + "methods": [ + "batch_verify_target_sites" + ] + }, + "CreateTargetSite": { + "methods": [ + "create_target_site" + ] + }, + "DeleteTargetSite": { + "methods": [ + "delete_target_site" + ] + }, + "DisableAdvancedSiteSearch": { + "methods": [ + "disable_advanced_site_search" + ] + }, + "EnableAdvancedSiteSearch": { + "methods": [ + "enable_advanced_site_search" + ] + }, + "FetchDomainVerificationStatus": { + "methods": [ + "fetch_domain_verification_status" + ] + }, + "GetSiteSearchEngine": { + "methods": [ + "get_site_search_engine" + ] + }, + "GetTargetSite": { + "methods": [ + "get_target_site" + ] + }, 
+ "ListTargetSites": { + "methods": [ + "list_target_sites" + ] + }, + "RecrawlUris": { + "methods": [ + "recrawl_uris" + ] + }, + "UpdateTargetSite": { + "methods": [ + "update_target_site" + ] + } + } + }, + "rest": { + "libraryClient": "SiteSearchEngineServiceClient", + "rpcs": { + "BatchCreateTargetSites": { + "methods": [ + "batch_create_target_sites" + ] + }, + "BatchVerifyTargetSites": { + "methods": [ + "batch_verify_target_sites" + ] + }, + "CreateTargetSite": { + "methods": [ + "create_target_site" + ] + }, + "DeleteTargetSite": { + "methods": [ + "delete_target_site" + ] + }, + "DisableAdvancedSiteSearch": { + "methods": [ + "disable_advanced_site_search" + ] + }, + "EnableAdvancedSiteSearch": { + "methods": [ + "enable_advanced_site_search" + ] + }, + "FetchDomainVerificationStatus": { + "methods": [ + "fetch_domain_verification_status" + ] + }, + "GetSiteSearchEngine": { + "methods": [ + "get_site_search_engine" + ] + }, + "GetTargetSite": { + "methods": [ + "get_target_site" + ] + }, + "ListTargetSites": { + "methods": [ + "list_target_sites" + ] + }, + "RecrawlUris": { + "methods": [ + "recrawl_uris" + ] + }, + "UpdateTargetSite": { + "methods": [ + "update_target_site" + ] + } + } + } + } + }, "UserEventService": { "clients": { "grpc": { diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_version.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_version.py index fb9a6cb2d900..360a0d13ebdd 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_version.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.11.7" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/async_client.py index 2cc802cd3e43..922e37c54a71 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/async_client.py @@ -42,10 +42,16 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore from google.cloud.location import locations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore -from google.cloud.discoveryengine_v1beta.types import completion_service +from google.cloud.discoveryengine_v1beta.types import ( + completion_service, + import_config, + purge_config, +) from .client import CompletionServiceClient from .transports.base import DEFAULT_CLIENT_INFO, CompletionServiceTransport @@ -343,6 +349,217 @@ async def sample_complete_query(): # Done; return the response. return response + async def import_suggestion_deny_list_entries( + self, + request: Optional[ + Union[import_config.ImportSuggestionDenyListEntriesRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Imports all + [SuggestionDenyListEntry][google.cloud.discoveryengine.v1beta.SuggestionDenyListEntry] + for a DataStore. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_import_suggestion_deny_list_entries(): + # Create a client + client = discoveryengine_v1beta.CompletionServiceAsyncClient() + + # Initialize request argument(s) + inline_source = discoveryengine_v1beta.InlineSource() + inline_source.entries.block_phrase = "block_phrase_value" + inline_source.entries.match_operator = "CONTAINS" + + request = discoveryengine_v1beta.ImportSuggestionDenyListEntriesRequest( + inline_source=inline_source, + parent="parent_value", + ) + + # Make the request + operation = client.import_suggestion_deny_list_entries(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.ImportSuggestionDenyListEntriesRequest, dict]]): + The request object. Request message for + [CompletionService.ImportSuggestionDenyListEntries][google.cloud.discoveryengine.v1beta.CompletionService.ImportSuggestionDenyListEntries] + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.discoveryengine_v1beta.types.ImportSuggestionDenyListEntriesResponse` Response message for + [CompletionService.ImportSuggestionDenyListEntries][google.cloud.discoveryengine.v1beta.CompletionService.ImportSuggestionDenyListEntries] + method. + + """ + # Create or coerce a protobuf request object. + request = import_config.ImportSuggestionDenyListEntriesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.import_suggestion_deny_list_entries, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + import_config.ImportSuggestionDenyListEntriesResponse, + metadata_type=import_config.ImportSuggestionDenyListEntriesMetadata, + ) + + # Done; return the response. + return response + + async def purge_suggestion_deny_list_entries( + self, + request: Optional[ + Union[purge_config.PurgeSuggestionDenyListEntriesRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Permanently deletes all + [SuggestionDenyListEntry][google.cloud.discoveryengine.v1beta.SuggestionDenyListEntry] + for a DataStore. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_purge_suggestion_deny_list_entries(): + # Create a client + client = discoveryengine_v1beta.CompletionServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.PurgeSuggestionDenyListEntriesRequest( + parent="parent_value", + ) + + # Make the request + operation = client.purge_suggestion_deny_list_entries(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.PurgeSuggestionDenyListEntriesRequest, dict]]): + The request object. Request message for + [CompletionService.PurgeSuggestionDenyListEntries][google.cloud.discoveryengine.v1beta.CompletionService.PurgeSuggestionDenyListEntries] + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1beta.types.PurgeSuggestionDenyListEntriesResponse` Response message for + [CompletionService.PurgeSuggestionDenyListEntries][google.cloud.discoveryengine.v1beta.CompletionService.PurgeSuggestionDenyListEntries] + method. 
+ + """ + # Create or coerce a protobuf request object. + request = purge_config.PurgeSuggestionDenyListEntriesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.purge_suggestion_deny_list_entries, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + purge_config.PurgeSuggestionDenyListEntriesResponse, + metadata_type=purge_config.PurgeSuggestionDenyListEntriesMetadata, + ) + + # Done; return the response. 
+ return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/client.py index b0800a9e6aef..6e92b29ad19a 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/client.py @@ -47,10 +47,16 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore from google.cloud.location import locations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore -from google.cloud.discoveryengine_v1beta.types import completion_service +from google.cloud.discoveryengine_v1beta.types import ( + completion_service, + import_config, + purge_config, +) from .transports.base import DEFAULT_CLIENT_INFO, CompletionServiceTransport from .transports.grpc import CompletionServiceGrpcTransport @@ -757,6 +763,225 @@ def sample_complete_query(): # Done; return the response. return response + def import_suggestion_deny_list_entries( + self, + request: Optional[ + Union[import_config.ImportSuggestionDenyListEntriesRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Imports all + [SuggestionDenyListEntry][google.cloud.discoveryengine.v1beta.SuggestionDenyListEntry] + for a DataStore. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_import_suggestion_deny_list_entries(): + # Create a client + client = discoveryengine_v1beta.CompletionServiceClient() + + # Initialize request argument(s) + inline_source = discoveryengine_v1beta.InlineSource() + inline_source.entries.block_phrase = "block_phrase_value" + inline_source.entries.match_operator = "CONTAINS" + + request = discoveryengine_v1beta.ImportSuggestionDenyListEntriesRequest( + inline_source=inline_source, + parent="parent_value", + ) + + # Make the request + operation = client.import_suggestion_deny_list_entries(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.ImportSuggestionDenyListEntriesRequest, dict]): + The request object. Request message for + [CompletionService.ImportSuggestionDenyListEntries][google.cloud.discoveryengine.v1beta.CompletionService.ImportSuggestionDenyListEntries] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.discoveryengine_v1beta.types.ImportSuggestionDenyListEntriesResponse` Response message for + [CompletionService.ImportSuggestionDenyListEntries][google.cloud.discoveryengine.v1beta.CompletionService.ImportSuggestionDenyListEntries] + method. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a import_config.ImportSuggestionDenyListEntriesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, import_config.ImportSuggestionDenyListEntriesRequest + ): + request = import_config.ImportSuggestionDenyListEntriesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.import_suggestion_deny_list_entries + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + import_config.ImportSuggestionDenyListEntriesResponse, + metadata_type=import_config.ImportSuggestionDenyListEntriesMetadata, + ) + + # Done; return the response. 
+ return response + + def purge_suggestion_deny_list_entries( + self, + request: Optional[ + Union[purge_config.PurgeSuggestionDenyListEntriesRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Permanently deletes all + [SuggestionDenyListEntry][google.cloud.discoveryengine.v1beta.SuggestionDenyListEntry] + for a DataStore. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_purge_suggestion_deny_list_entries(): + # Create a client + client = discoveryengine_v1beta.CompletionServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.PurgeSuggestionDenyListEntriesRequest( + parent="parent_value", + ) + + # Make the request + operation = client.purge_suggestion_deny_list_entries(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.PurgeSuggestionDenyListEntriesRequest, dict]): + The request object. Request message for + [CompletionService.PurgeSuggestionDenyListEntries][google.cloud.discoveryengine.v1beta.CompletionService.PurgeSuggestionDenyListEntries] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1beta.types.PurgeSuggestionDenyListEntriesResponse` Response message for + [CompletionService.PurgeSuggestionDenyListEntries][google.cloud.discoveryengine.v1beta.CompletionService.PurgeSuggestionDenyListEntries] + method. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a purge_config.PurgeSuggestionDenyListEntriesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, purge_config.PurgeSuggestionDenyListEntriesRequest): + request = purge_config.PurgeSuggestionDenyListEntriesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.purge_suggestion_deny_list_entries + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + purge_config.PurgeSuggestionDenyListEntriesResponse, + metadata_type=purge_config.PurgeSuggestionDenyListEntriesMetadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "CompletionServiceClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/base.py index 9a0a6b7e46ec..8fc8886ef994 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/base.py @@ -18,7 +18,7 @@ import google.api_core from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, operations_v1 from google.api_core import retry as retries import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore @@ -27,7 +27,11 @@ from google.oauth2 import service_account # type: ignore from google.cloud.discoveryengine_v1beta import gapic_version as package_version -from google.cloud.discoveryengine_v1beta.types import completion_service +from google.cloud.discoveryengine_v1beta.types import ( + completion_service, + import_config, + purge_config, +) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -133,6 +137,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.import_suggestion_deny_list_entries: gapic_v1.method.wrap_method( + self.import_suggestion_deny_list_entries, + default_timeout=None, + client_info=client_info, + ), + self.purge_suggestion_deny_list_entries: gapic_v1.method.wrap_method( + self.purge_suggestion_deny_list_entries, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -144,6 +158,11 @@ def close(self): """ raise NotImplementedError() + @property + def 
operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + @property def complete_query( self, @@ -156,6 +175,24 @@ def complete_query( ]: raise NotImplementedError() + @property + def import_suggestion_deny_list_entries( + self, + ) -> Callable[ + [import_config.ImportSuggestionDenyListEntriesRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def purge_suggestion_deny_list_entries( + self, + ) -> Callable[ + [purge_config.PurgeSuggestionDenyListEntriesRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/grpc.py index a5fc86174ed9..ee7c762bf6e5 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/grpc.py @@ -16,7 +16,7 @@ from typing import Callable, Dict, Optional, Sequence, Tuple, Union import warnings -from google.api_core import gapic_v1, grpc_helpers +from google.api_core import gapic_v1, grpc_helpers, operations_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -24,7 +24,11 @@ from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore -from google.cloud.discoveryengine_v1beta.types import completion_service +from google.cloud.discoveryengine_v1beta.types import ( + completion_service, + import_config, + 
purge_config, +) from .base import DEFAULT_CLIENT_INFO, CompletionServiceTransport @@ -112,6 +116,7 @@ def __init__( self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -230,6 +235,20 @@ def grpc_channel(self) -> grpc.Channel: """Return the channel designed to connect to this service.""" return self._grpc_channel + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + @property def complete_query( self, @@ -260,6 +279,72 @@ def complete_query( ) return self._stubs["complete_query"] + @property + def import_suggestion_deny_list_entries( + self, + ) -> Callable[ + [import_config.ImportSuggestionDenyListEntriesRequest], operations_pb2.Operation + ]: + r"""Return a callable for the import suggestion deny list + entries method over gRPC. + + Imports all + [SuggestionDenyListEntry][google.cloud.discoveryengine.v1beta.SuggestionDenyListEntry] + for a DataStore. + + Returns: + Callable[[~.ImportSuggestionDenyListEntriesRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "import_suggestion_deny_list_entries" not in self._stubs: + self._stubs[ + "import_suggestion_deny_list_entries" + ] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.CompletionService/ImportSuggestionDenyListEntries", + request_serializer=import_config.ImportSuggestionDenyListEntriesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["import_suggestion_deny_list_entries"] + + @property + def purge_suggestion_deny_list_entries( + self, + ) -> Callable[ + [purge_config.PurgeSuggestionDenyListEntriesRequest], operations_pb2.Operation + ]: + r"""Return a callable for the purge suggestion deny list + entries method over gRPC. + + Permanently deletes all + [SuggestionDenyListEntry][google.cloud.discoveryengine.v1beta.SuggestionDenyListEntry] + for a DataStore. + + Returns: + Callable[[~.PurgeSuggestionDenyListEntriesRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "purge_suggestion_deny_list_entries" not in self._stubs: + self._stubs[ + "purge_suggestion_deny_list_entries" + ] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.CompletionService/PurgeSuggestionDenyListEntries", + request_serializer=purge_config.PurgeSuggestionDenyListEntriesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["purge_suggestion_deny_list_entries"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/grpc_asyncio.py index 49b557874534..806dad571078 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/grpc_asyncio.py @@ -16,7 +16,7 @@ from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union import warnings -from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.cloud.location import locations_pb2 # type: ignore @@ -24,7 +24,11 @@ import grpc # type: ignore from grpc.experimental import aio # type: ignore -from google.cloud.discoveryengine_v1beta.types import completion_service +from google.cloud.discoveryengine_v1beta.types import ( + completion_service, + import_config, + purge_config, +) from .base import DEFAULT_CLIENT_INFO, CompletionServiceTransport from .grpc import CompletionServiceGrpcTransport @@ -158,6 +162,7 @@ def __init__( self._grpc_channel = None 
self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -233,6 +238,22 @@ def grpc_channel(self) -> aio.Channel: # Return the channel from cache. return self._grpc_channel + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + @property def complete_query( self, @@ -263,6 +284,74 @@ def complete_query( ) return self._stubs["complete_query"] + @property + def import_suggestion_deny_list_entries( + self, + ) -> Callable[ + [import_config.ImportSuggestionDenyListEntriesRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the import suggestion deny list + entries method over gRPC. + + Imports all + [SuggestionDenyListEntry][google.cloud.discoveryengine.v1beta.SuggestionDenyListEntry] + for a DataStore. + + Returns: + Callable[[~.ImportSuggestionDenyListEntriesRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "import_suggestion_deny_list_entries" not in self._stubs: + self._stubs[ + "import_suggestion_deny_list_entries" + ] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.CompletionService/ImportSuggestionDenyListEntries", + request_serializer=import_config.ImportSuggestionDenyListEntriesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["import_suggestion_deny_list_entries"] + + @property + def purge_suggestion_deny_list_entries( + self, + ) -> Callable[ + [purge_config.PurgeSuggestionDenyListEntriesRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the purge suggestion deny list + entries method over gRPC. + + Permanently deletes all + [SuggestionDenyListEntry][google.cloud.discoveryengine.v1beta.SuggestionDenyListEntry] + for a DataStore. + + Returns: + Callable[[~.PurgeSuggestionDenyListEntriesRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "purge_suggestion_deny_list_entries" not in self._stubs: + self._stubs[ + "purge_suggestion_deny_list_entries" + ] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.CompletionService/PurgeSuggestionDenyListEntries", + request_serializer=purge_config.PurgeSuggestionDenyListEntriesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["purge_suggestion_deny_list_entries"] + def close(self): return self.grpc_channel.close() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/rest.py index cf7af366c03c..963a182a9513 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/rest.py @@ -20,7 +20,13 @@ from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings -from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore @@ -39,7 +45,11 @@ from google.longrunning import operations_pb2 # type: ignore -from google.cloud.discoveryengine_v1beta.types import completion_service +from google.cloud.discoveryengine_v1beta.types import ( + completion_service, + import_config, + purge_config, +) from .base import CompletionServiceTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO @@ -74,6 +84,22 @@ def post_complete_query(self, response): logging.log(f"Received 
response: {response}") return response + def pre_import_suggestion_deny_list_entries(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_import_suggestion_deny_list_entries(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_purge_suggestion_deny_list_entries(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_purge_suggestion_deny_list_entries(self, response): + logging.log(f"Received response: {response}") + return response + transport = CompletionServiceRestTransport(interceptor=MyCustomCompletionServiceInterceptor()) client = CompletionServiceClient(transport=transport) @@ -103,6 +129,56 @@ def post_complete_query( """ return response + def pre_import_suggestion_deny_list_entries( + self, + request: import_config.ImportSuggestionDenyListEntriesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + import_config.ImportSuggestionDenyListEntriesRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for import_suggestion_deny_list_entries + + Override in a subclass to manipulate the request or metadata + before they are sent to the CompletionService server. + """ + return request, metadata + + def post_import_suggestion_deny_list_entries( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for import_suggestion_deny_list_entries + + Override in a subclass to manipulate the response + after it is returned by the CompletionService server but before + it is returned to user code. 
+ """ + return response + + def pre_purge_suggestion_deny_list_entries( + self, + request: purge_config.PurgeSuggestionDenyListEntriesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + purge_config.PurgeSuggestionDenyListEntriesRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for purge_suggestion_deny_list_entries + + Override in a subclass to manipulate the request or metadata + before they are sent to the CompletionService server. + """ + return request, metadata + + def post_purge_suggestion_deny_list_entries( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for purge_suggestion_deny_list_entries + + Override in a subclass to manipulate the response + after it is returned by the CompletionService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -241,11 +317,156 @@ def __init__( self._session = AuthorizedSession( self._credentials, default_host=self.DEFAULT_HOST ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None if client_cert_source_for_mtls: self._session.configure_mtls_channel(client_cert_source_for_mtls) self._interceptor = interceptor or CompletionServiceRestInterceptor() self._prep_wrapped_messages(client_info) + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/engines/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, + { + "method": "get", + "uri": 
"/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/engines/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1beta", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. 
+ return self._operations_client + class _CompleteQuery(CompletionServiceRestStub): def __hash__(self): return hash("CompleteQuery") @@ -343,6 +564,226 @@ def __call__( resp = self._interceptor.post_complete_query(resp) return resp + class _ImportSuggestionDenyListEntries(CompletionServiceRestStub): + def __hash__(self): + return hash("ImportSuggestionDenyListEntries") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: import_config.ImportSuggestionDenyListEntriesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the import suggestion deny + list entries method over HTTP. + + Args: + request (~.import_config.ImportSuggestionDenyListEntriesRequest): + The request object. Request message for + [CompletionService.ImportSuggestionDenyListEntries][google.cloud.discoveryengine.v1beta.CompletionService.ImportSuggestionDenyListEntries] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*/collections/*/dataStores/*}/suggestionDenyListEntries:import", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*/dataStores/*}/suggestionDenyListEntries:import", + "body": "*", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_import_suggestion_deny_list_entries( + request, metadata + ) + pb_request = import_config.ImportSuggestionDenyListEntriesRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_import_suggestion_deny_list_entries(resp) + return resp + + class _PurgeSuggestionDenyListEntries(CompletionServiceRestStub): + def __hash__(self): + return hash("PurgeSuggestionDenyListEntries") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: purge_config.PurgeSuggestionDenyListEntriesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the purge suggestion deny + list entries method over HTTP. + + Args: + request (~.purge_config.PurgeSuggestionDenyListEntriesRequest): + The request object. Request message for + [CompletionService.PurgeSuggestionDenyListEntries][google.cloud.discoveryengine.v1beta.CompletionService.PurgeSuggestionDenyListEntries] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*/collections/*/dataStores/*}/suggestionDenyListEntries:purge", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*/dataStores/**}/suggestionDenyListEntries:purge", + "body": "*", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_purge_suggestion_deny_list_entries( + request, metadata + ) + pb_request = purge_config.PurgeSuggestionDenyListEntriesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_purge_suggestion_deny_list_entries(resp) + return resp + @property def complete_query( self, @@ -354,6 +795,26 @@ def complete_query( # In C++ this would require a dynamic_cast return self._CompleteQuery(self._session, self._host, self._interceptor) # type: ignore + @property + def import_suggestion_deny_list_entries( + self, + ) -> Callable[ + [import_config.ImportSuggestionDenyListEntriesRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ImportSuggestionDenyListEntries(self._session, self._host, self._interceptor) # type: ignore + + @property + def purge_suggestion_deny_list_entries( + self, + ) -> Callable[ + [purge_config.PurgeSuggestionDenyListEntriesRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._PurgeSuggestionDenyListEntries(self._session, self._host, self._interceptor) # type: ignore + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore @@ -383,6 +844,10 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, { "method": "get", "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", @@ -497,6 +962,10 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, { "method": "get", "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/async_client.py index 693343257012..46db39e03934 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/async_client.py @@ -692,7 +692,7 @@ async def sample_update_conversation(): [Conversation][google.cloud.discoveryengine.v1beta.Conversation] to update. The following are NOT supported: - - [conversation.name][] + - [Conversation.name][google.cloud.discoveryengine.v1beta.Conversation.name] If not set or empty, all supported fields are updated. 
diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/client.py index e542e8eac618..76a388451eb6 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/client.py @@ -1166,7 +1166,7 @@ def sample_update_conversation(): [Conversation][google.cloud.discoveryengine.v1beta.Conversation] to update. The following are NOT supported: - - [conversation.name][] + - [Conversation.name][google.cloud.discoveryengine.v1beta.Conversation.name] If not set or empty, all supported fields are updated. diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/transports/rest.py index e5d12ef26574..5c5b8497afb8 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/transports/rest.py @@ -460,6 +460,11 @@ def __call__( "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/conversations/*}:converse", "body": "*", }, + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/engines/*/conversations/*}:converse", + "body": "*", + }, ] request, metadata = self._interceptor.pre_converse_conversation( request, metadata @@ -569,6 +574,11 @@ def __call__( "uri": "/v1beta/{parent=projects/*/locations/*/collections/*/dataStores/*}/conversations", "body": 
"conversation", }, + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*/collections/*/engines/*}/conversations", + "body": "conversation", + }, ] request, metadata = self._interceptor.pre_create_conversation( request, metadata @@ -668,6 +678,10 @@ def __call__( "method": "delete", "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/conversations/*}", }, + { + "method": "delete", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/engines/*/conversations/*}", + }, ] request, metadata = self._interceptor.pre_delete_conversation( request, metadata @@ -756,6 +770,10 @@ def __call__( "method": "get", "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/conversations/*}", }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/engines/*/conversations/*}", + }, ] request, metadata = self._interceptor.pre_get_conversation( request, metadata @@ -852,6 +870,10 @@ def __call__( "method": "get", "uri": "/v1beta/{parent=projects/*/locations/*/collections/*/dataStores/*}/conversations", }, + { + "method": "get", + "uri": "/v1beta/{parent=projects/*/locations/*/collections/*/engines/*}/conversations", + }, ] request, metadata = self._interceptor.pre_list_conversations( request, metadata @@ -951,6 +973,11 @@ def __call__( "uri": "/v1beta/{conversation.name=projects/*/locations/*/collections/*/dataStores/*/conversations/*}", "body": "conversation", }, + { + "method": "patch", + "uri": "/v1beta/{conversation.name=projects/*/locations/*/collections/*/engines/*/conversations/*}", + "body": "conversation", + }, ] request, metadata = self._interceptor.pre_update_conversation( request, metadata @@ -1100,6 +1127,10 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, { "method": "get", "uri": 
"/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", @@ -1214,6 +1245,10 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, { "method": "get", "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/__init__.py new file mode 100644 index 000000000000..ff4ce7fbfad6 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import DataStoreServiceAsyncClient +from .client import DataStoreServiceClient + +__all__ = ( + "DataStoreServiceClient", + "DataStoreServiceAsyncClient", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/async_client.py new file mode 100644 index 000000000000..a698a0cca0db --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/async_client.py @@ -0,0 +1,1088 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.discoveryengine_v1beta.services.data_store_service import pagers +from google.cloud.discoveryengine_v1beta.types import data_store as gcd_data_store +from google.cloud.discoveryengine_v1beta.types import common +from google.cloud.discoveryengine_v1beta.types import data_store +from google.cloud.discoveryengine_v1beta.types import data_store_service + +from .client import DataStoreServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, DataStoreServiceTransport +from .transports.grpc_asyncio import DataStoreServiceGrpcAsyncIOTransport + + +class DataStoreServiceAsyncClient: + """Service for managing + [DataStore][google.cloud.discoveryengine.v1beta.DataStore] + configuration. 
+ """ + + _client: DataStoreServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = DataStoreServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DataStoreServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = DataStoreServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = DataStoreServiceClient._DEFAULT_UNIVERSE + + collection_path = staticmethod(DataStoreServiceClient.collection_path) + parse_collection_path = staticmethod(DataStoreServiceClient.parse_collection_path) + data_store_path = staticmethod(DataStoreServiceClient.data_store_path) + parse_data_store_path = staticmethod(DataStoreServiceClient.parse_data_store_path) + common_billing_account_path = staticmethod( + DataStoreServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + DataStoreServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(DataStoreServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + DataStoreServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + DataStoreServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + DataStoreServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(DataStoreServiceClient.common_project_path) + parse_common_project_path = staticmethod( + DataStoreServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(DataStoreServiceClient.common_location_path) + parse_common_location_path = staticmethod( + DataStoreServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. 
+ args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataStoreServiceAsyncClient: The constructed client. + """ + return DataStoreServiceClient.from_service_account_info.__func__(DataStoreServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataStoreServiceAsyncClient: The constructed client. + """ + return DataStoreServiceClient.from_service_account_file.__func__(DataStoreServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint.
+ + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return DataStoreServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> DataStoreServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DataStoreServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = functools.partial( + type(DataStoreServiceClient).get_transport_class, type(DataStoreServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, DataStoreServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the data store service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.DataStoreServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client = DataStoreServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_data_store( + self, + request: Optional[ + Union[data_store_service.CreateDataStoreRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + data_store: Optional[gcd_data_store.DataStore] = None, + data_store_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a + [DataStore][google.cloud.discoveryengine.v1beta.DataStore]. + + DataStore is for storing + [Documents][google.cloud.discoveryengine.v1beta.Document]. To + serve these documents for Search, or Recommendation use case, an + [Engine][google.cloud.discoveryengine.v1beta.Engine] needs to be + created separately. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_create_data_store(): + # Create a client + client = discoveryengine_v1beta.DataStoreServiceAsyncClient() + + # Initialize request argument(s) + data_store = discoveryengine_v1beta.DataStore() + data_store.display_name = "display_name_value" + + request = discoveryengine_v1beta.CreateDataStoreRequest( + parent="parent_value", + data_store=data_store, + data_store_id="data_store_id_value", + ) + + # Make the request + operation = client.create_data_store(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.CreateDataStoreRequest, dict]]): + The request object. Request for + [DataStoreService.CreateDataStore][google.cloud.discoveryengine.v1beta.DataStoreService.CreateDataStore] + method. + parent (:class:`str`): + Required. The parent resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_store (:class:`google.cloud.discoveryengine_v1beta.types.DataStore`): + Required. The + [DataStore][google.cloud.discoveryengine.v1beta.DataStore] + to create. + + This corresponds to the ``data_store`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_store_id (:class:`str`): + Required. The ID to use for the + [DataStore][google.cloud.discoveryengine.v1beta.DataStore], + which will become the final component of the + [DataStore][google.cloud.discoveryengine.v1beta.DataStore]'s + resource name. 
+ + This field must conform to + `RFC-1034 <https://tools.ietf.org/html/rfc1034>`__ + standard with a length limit of 63 characters. + Otherwise, an INVALID_ARGUMENT error is returned. + + This corresponds to the ``data_store_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.discoveryengine_v1beta.types.DataStore` + DataStore captures global settings and configs at the + DataStore level. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, data_store, data_store_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_store_service.CreateDataStoreRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if data_store is not None: + request.data_store = data_store + if data_store_id is not None: + request.data_store_id = data_store_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_data_store, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here.
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcd_data_store.DataStore, + metadata_type=data_store_service.CreateDataStoreMetadata, + ) + + # Done; return the response. + return response + + async def get_data_store( + self, + request: Optional[Union[data_store_service.GetDataStoreRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> data_store.DataStore: + r"""Gets a + [DataStore][google.cloud.discoveryengine.v1beta.DataStore]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_get_data_store(): + # Create a client + client = discoveryengine_v1beta.DataStoreServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.GetDataStoreRequest( + name="name_value", + ) + + # Make the request + response = await client.get_data_store(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.GetDataStoreRequest, dict]]): + The request object. 
Request message for + [DataStoreService.GetDataStore][google.cloud.discoveryengine.v1beta.DataStoreService.GetDataStore] + method. + name (:class:`str`): + Required. Full resource name of + [DataStore][google.cloud.discoveryengine.v1beta.DataStore], + such as + ``projects/{project}/locations/{location}/collections/{collection_id}/dataStores/{data_store_id}``. + + If the caller does not have permission to access the + [DataStore][google.cloud.discoveryengine.v1beta.DataStore], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the requested + [DataStore][google.cloud.discoveryengine.v1beta.DataStore] + does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.types.DataStore: + DataStore captures global settings + and configs at the DataStore level. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_store_service.GetDataStoreRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_data_store, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_data_stores( + self, + request: Optional[Union[data_store_service.ListDataStoresRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDataStoresAsyncPager: + r"""Lists all the + [DataStore][google.cloud.discoveryengine.v1beta.DataStore]s + associated with the project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_list_data_stores(): + # Create a client + client = discoveryengine_v1beta.DataStoreServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.ListDataStoresRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_stores(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.ListDataStoresRequest, dict]]): + The request object. Request message for + [DataStoreService.ListDataStores][google.cloud.discoveryengine.v1beta.DataStoreService.ListDataStores] + method. + parent (:class:`str`): + Required. The parent branch resource name, such as + ``projects/{project}/locations/{location}/collections/{collection_id}``. + + If the caller does not have permission to list + [DataStore][google.cloud.discoveryengine.v1beta.DataStore]s + under this location, regardless of whether or not this + data store exists, a PERMISSION_DENIED error is + returned. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.services.data_store_service.pagers.ListDataStoresAsyncPager: + Response message for + [DataStoreService.ListDataStores][google.cloud.discoveryengine.v1beta.DataStoreService.ListDataStores] + method. 
+ + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_store_service.ListDataStoresRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_data_stores, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDataStoresAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_data_store( + self, + request: Optional[ + Union[data_store_service.DeleteDataStoreRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a + [DataStore][google.cloud.discoveryengine.v1beta.DataStore]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_delete_data_store(): + # Create a client + client = discoveryengine_v1beta.DataStoreServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.DeleteDataStoreRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_data_store(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.DeleteDataStoreRequest, dict]]): + The request object. Request message for + [DataStoreService.DeleteDataStore][google.cloud.discoveryengine.v1beta.DataStoreService.DeleteDataStore] + method. + name (:class:`str`): + Required. Full resource name of + [DataStore][google.cloud.discoveryengine.v1beta.DataStore], + such as + ``projects/{project}/locations/{location}/collections/{collection_id}/dataStores/{data_store_id}``. 
+ + If the caller does not have permission to delete the + [DataStore][google.cloud.discoveryengine.v1beta.DataStore], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the + [DataStore][google.cloud.discoveryengine.v1beta.DataStore] + to delete does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_store_service.DeleteDataStoreRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_data_store, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=data_store_service.DeleteDataStoreMetadata, + ) + + # Done; return the response. + return response + + async def update_data_store( + self, + request: Optional[ + Union[data_store_service.UpdateDataStoreRequest, dict] + ] = None, + *, + data_store: Optional[gcd_data_store.DataStore] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_data_store.DataStore: + r"""Updates a + [DataStore][google.cloud.discoveryengine.v1beta.DataStore] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_update_data_store(): + # Create a client + client = discoveryengine_v1beta.DataStoreServiceAsyncClient() + + # Initialize request argument(s) + data_store = discoveryengine_v1beta.DataStore() + data_store.display_name = "display_name_value" + + request = discoveryengine_v1beta.UpdateDataStoreRequest( + data_store=data_store, + ) + + # Make the request + response = await client.update_data_store(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.UpdateDataStoreRequest, dict]]): + The request object. Request message for + [DataStoreService.UpdateDataStore][google.cloud.discoveryengine.v1beta.DataStoreService.UpdateDataStore] + method. + data_store (:class:`google.cloud.discoveryengine_v1beta.types.DataStore`): + Required. The + [DataStore][google.cloud.discoveryengine.v1beta.DataStore] + to update. + + If the caller does not have permission to update the + [DataStore][google.cloud.discoveryengine.v1beta.DataStore], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the + [DataStore][google.cloud.discoveryengine.v1beta.DataStore] + to update does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``data_store`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Indicates which fields in the provided + [DataStore][google.cloud.discoveryengine.v1beta.DataStore] + to update. + + If an unsupported or unknown field is provided, an + INVALID_ARGUMENT error is returned. 
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.types.DataStore: + DataStore captures global settings + and configs at the DataStore level. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([data_store, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_store_service.UpdateDataStoreRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if data_store is not None: + request.data_store = data_store + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_data_store, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_store.name", request.data_store.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "DataStoreServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("DataStoreServiceAsyncClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/client.py new file mode 100644 index 000000000000..d2bc8b9f9f7e --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/client.py @@ -0,0 +1,1530 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.discoveryengine_v1beta.services.data_store_service import pagers +from google.cloud.discoveryengine_v1beta.types import data_store as gcd_data_store +from google.cloud.discoveryengine_v1beta.types import common +from google.cloud.discoveryengine_v1beta.types import data_store +from google.cloud.discoveryengine_v1beta.types import data_store_service + +from .transports.base import DEFAULT_CLIENT_INFO, DataStoreServiceTransport +from .transports.grpc import DataStoreServiceGrpcTransport +from .transports.grpc_asyncio import 
DataStoreServiceGrpcAsyncIOTransport +from .transports.rest import DataStoreServiceRestTransport + + +class DataStoreServiceClientMeta(type): + """Metaclass for the DataStoreService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[DataStoreServiceTransport]] + _transport_registry["grpc"] = DataStoreServiceGrpcTransport + _transport_registry["grpc_asyncio"] = DataStoreServiceGrpcAsyncIOTransport + _transport_registry["rest"] = DataStoreServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[DataStoreServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class DataStoreServiceClient(metaclass=DataStoreServiceClientMeta): + """Service for managing + [DataStore][google.cloud.discoveryengine.v1beta.DataStore] + configuration. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "discoveryengine.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "discoveryengine.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataStoreServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataStoreServiceClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> DataStoreServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DataStoreServiceTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def collection_path( + project: str, + location: str, + collection: str, + ) -> str: + """Returns a fully-qualified collection string.""" + return ( + "projects/{project}/locations/{location}/collections/{collection}".format( + project=project, + location=location, + collection=collection, + ) + ) + + @staticmethod + def parse_collection_path(path: str) -> Dict[str, str]: + """Parses a collection path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/collections/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def data_store_path( + project: str, + location: str, + data_store: str, + ) -> str: + """Returns a fully-qualified data_store string.""" + return "projects/{project}/locations/{location}/dataStores/{data_store}".format( + project=project, + location=location, + data_store=data_store, + ) + + @staticmethod + def parse_data_store_path(path: str) -> Dict[str, str]: + """Parses a data_store path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/dataStores/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component 
segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. 
Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". 
+ + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = DataStoreServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = DataStoreServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = DataStoreServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = DataStoreServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. 
+ + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = DataStoreServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or DataStoreServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, DataStoreServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the data store service client. 
+ + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, DataStoreServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = DataStoreServiceClient._read_environment_variables() + self._client_cert_source = DataStoreServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = DataStoreServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, DataStoreServiceTransport) + if transport_provided: + # transport is a DataStoreServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." 
+ ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(DataStoreServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or DataStoreServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(cast(str, transport)) + self._transport = Transport( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def create_data_store( + self, + request: Optional[ + Union[data_store_service.CreateDataStoreRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + data_store: Optional[gcd_data_store.DataStore] = None, + data_store_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a + [DataStore][google.cloud.discoveryengine.v1beta.DataStore]. + + DataStore is for storing + [Documents][google.cloud.discoveryengine.v1beta.Document]. To + serve these documents for Search, or Recommendation use case, an + [Engine][google.cloud.discoveryengine.v1beta.Engine] needs to be + created separately. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_create_data_store(): + # Create a client + client = discoveryengine_v1beta.DataStoreServiceClient() + + # Initialize request argument(s) + data_store = discoveryengine_v1beta.DataStore() + data_store.display_name = "display_name_value" + + request = discoveryengine_v1beta.CreateDataStoreRequest( + parent="parent_value", + data_store=data_store, + data_store_id="data_store_id_value", + ) + + # Make the request + operation = client.create_data_store(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.CreateDataStoreRequest, dict]): + The request object. Request for + [DataStoreService.CreateDataStore][google.cloud.discoveryengine.v1beta.DataStoreService.CreateDataStore] + method. + parent (str): + Required. The parent resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_store (google.cloud.discoveryengine_v1beta.types.DataStore): + Required. The + [DataStore][google.cloud.discoveryengine.v1beta.DataStore] + to create. + + This corresponds to the ``data_store`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_store_id (str): + Required. 
The ID to use for the + [DataStore][google.cloud.discoveryengine.v1beta.DataStore], + which will become the final component of the + [DataStore][google.cloud.discoveryengine.v1beta.DataStore]'s + resource name. + + This field must conform to + `RFC-1034 `__ + standard with a length limit of 63 characters. + Otherwise, an INVALID_ARGUMENT error is returned. + + This corresponds to the ``data_store_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.discoveryengine_v1beta.types.DataStore` + DataStore captures global settings and configs at the + DataStore level. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, data_store, data_store_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_store_service.CreateDataStoreRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, data_store_service.CreateDataStoreRequest): + request = data_store_service.CreateDataStoreRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if data_store is not None: + request.data_store = data_store + if data_store_id is not None: + request.data_store_id = data_store_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_data_store] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcd_data_store.DataStore, + metadata_type=data_store_service.CreateDataStoreMetadata, + ) + + # Done; return the response. + return response + + def get_data_store( + self, + request: Optional[Union[data_store_service.GetDataStoreRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> data_store.DataStore: + r"""Gets a + [DataStore][google.cloud.discoveryengine.v1beta.DataStore]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_get_data_store(): + # Create a client + client = discoveryengine_v1beta.DataStoreServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.GetDataStoreRequest( + name="name_value", + ) + + # Make the request + response = client.get_data_store(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.GetDataStoreRequest, dict]): + The request object. Request message for + [DataStoreService.GetDataStore][google.cloud.discoveryengine.v1beta.DataStoreService.GetDataStore] + method. + name (str): + Required. Full resource name of + [DataStore][google.cloud.discoveryengine.v1beta.DataStore], + such as + ``projects/{project}/locations/{location}/collections/{collection_id}/dataStores/{data_store_id}``. + + If the caller does not have permission to access the + [DataStore][google.cloud.discoveryengine.v1beta.DataStore], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the requested + [DataStore][google.cloud.discoveryengine.v1beta.DataStore] + does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.types.DataStore: + DataStore captures global settings + and configs at the DataStore level. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_store_service.GetDataStoreRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, data_store_service.GetDataStoreRequest): + request = data_store_service.GetDataStoreRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_data_store] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_data_stores( + self, + request: Optional[Union[data_store_service.ListDataStoresRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDataStoresPager: + r"""Lists all the + [DataStore][google.cloud.discoveryengine.v1beta.DataStore]s + associated with the project. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_list_data_stores(): + # Create a client + client = discoveryengine_v1beta.DataStoreServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.ListDataStoresRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_stores(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.ListDataStoresRequest, dict]): + The request object. Request message for + [DataStoreService.ListDataStores][google.cloud.discoveryengine.v1beta.DataStoreService.ListDataStores] + method. + parent (str): + Required. The parent branch resource name, such as + ``projects/{project}/locations/{location}/collections/{collection_id}``. + + If the caller does not have permission to list + [DataStore][google.cloud.discoveryengine.v1beta.DataStore]s + under this location, regardless of whether or not this + data store exists, a PERMISSION_DENIED error is + returned. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.discoveryengine_v1beta.services.data_store_service.pagers.ListDataStoresPager: + Response message for + [DataStoreService.ListDataStores][google.cloud.discoveryengine.v1beta.DataStoreService.ListDataStores] + method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_store_service.ListDataStoresRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, data_store_service.ListDataStoresRequest): + request = data_store_service.ListDataStoresRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_data_stores] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListDataStoresPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_data_store( + self, + request: Optional[ + Union[data_store_service.DeleteDataStoreRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a + [DataStore][google.cloud.discoveryengine.v1beta.DataStore]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_delete_data_store(): + # Create a client + client = discoveryengine_v1beta.DataStoreServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.DeleteDataStoreRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_data_store(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.DeleteDataStoreRequest, dict]): + The request object. Request message for + [DataStoreService.DeleteDataStore][google.cloud.discoveryengine.v1beta.DataStoreService.DeleteDataStore] + method. + name (str): + Required. Full resource name of + [DataStore][google.cloud.discoveryengine.v1beta.DataStore], + such as + ``projects/{project}/locations/{location}/collections/{collection_id}/dataStores/{data_store_id}``. 
+ + If the caller does not have permission to delete the + [DataStore][google.cloud.discoveryengine.v1beta.DataStore], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the + [DataStore][google.cloud.discoveryengine.v1beta.DataStore] + to delete does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_store_service.DeleteDataStoreRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, data_store_service.DeleteDataStoreRequest): + request = data_store_service.DeleteDataStoreRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_data_store] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=data_store_service.DeleteDataStoreMetadata, + ) + + # Done; return the response. + return response + + def update_data_store( + self, + request: Optional[ + Union[data_store_service.UpdateDataStoreRequest, dict] + ] = None, + *, + data_store: Optional[gcd_data_store.DataStore] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_data_store.DataStore: + r"""Updates a + [DataStore][google.cloud.discoveryengine.v1beta.DataStore] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_update_data_store(): + # Create a client + client = discoveryengine_v1beta.DataStoreServiceClient() + + # Initialize request argument(s) + data_store = discoveryengine_v1beta.DataStore() + data_store.display_name = "display_name_value" + + request = discoveryengine_v1beta.UpdateDataStoreRequest( + data_store=data_store, + ) + + # Make the request + response = client.update_data_store(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.UpdateDataStoreRequest, dict]): + The request object. Request message for + [DataStoreService.UpdateDataStore][google.cloud.discoveryengine.v1beta.DataStoreService.UpdateDataStore] + method. + data_store (google.cloud.discoveryengine_v1beta.types.DataStore): + Required. The + [DataStore][google.cloud.discoveryengine.v1beta.DataStore] + to update. + + If the caller does not have permission to update the + [DataStore][google.cloud.discoveryengine.v1beta.DataStore], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the + [DataStore][google.cloud.discoveryengine.v1beta.DataStore] + to update does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``data_store`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Indicates which fields in the provided + [DataStore][google.cloud.discoveryengine.v1beta.DataStore] + to update. + + If an unsupported or unknown field is provided, an + INVALID_ARGUMENT error is returned. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.types.DataStore: + DataStore captures global settings + and configs at the DataStore level. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([data_store, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_store_service.UpdateDataStoreRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, data_store_service.UpdateDataStoreRequest): + request = data_store_service.UpdateDataStoreRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if data_store is not None: + request.data_store = data_store + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_data_store] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_store.name", request.data_store.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "DataStoreServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("DataStoreServiceClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/pagers.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/pagers.py new file mode 100644 index 000000000000..6e2b89c090ef --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.discoveryengine_v1beta.types import data_store, data_store_service + + +class ListDataStoresPager: + """A pager for iterating through ``list_data_stores`` requests. + + This class thinly wraps an initial + :class:`google.cloud.discoveryengine_v1beta.types.ListDataStoresResponse` object, and + provides an ``__iter__`` method to iterate through its + ``data_stores`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListDataStores`` requests and continue to iterate + through the ``data_stores`` field on the + corresponding responses. + + All the usual :class:`google.cloud.discoveryengine_v1beta.types.ListDataStoresResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., data_store_service.ListDataStoresResponse], + request: data_store_service.ListDataStoresRequest, + response: data_store_service.ListDataStoresResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.discoveryengine_v1beta.types.ListDataStoresRequest): + The initial request object. + response (google.cloud.discoveryengine_v1beta.types.ListDataStoresResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = data_store_service.ListDataStoresRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[data_store_service.ListDataStoresResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[data_store.DataStore]: + for page in self.pages: + yield from page.data_stores + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDataStoresAsyncPager: + """A pager for iterating through ``list_data_stores`` requests. + + This class thinly wraps an initial + :class:`google.cloud.discoveryengine_v1beta.types.ListDataStoresResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``data_stores`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDataStores`` requests and continue to iterate + through the ``data_stores`` field on the + corresponding responses. + + All the usual :class:`google.cloud.discoveryengine_v1beta.types.ListDataStoresResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[data_store_service.ListDataStoresResponse]], + request: data_store_service.ListDataStoresRequest, + response: data_store_service.ListDataStoresResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.discoveryengine_v1beta.types.ListDataStoresRequest): + The initial request object. + response (google.cloud.discoveryengine_v1beta.types.ListDataStoresResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = data_store_service.ListDataStoresRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[data_store_service.ListDataStoresResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[data_store.DataStore]: + async def async_generator(): + async for page in self.pages: + for response in page.data_stores: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/transports/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/transports/__init__.py new file mode 100644 index 000000000000..6f5f07baebad --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import DataStoreServiceTransport +from .grpc import DataStoreServiceGrpcTransport +from .grpc_asyncio import DataStoreServiceGrpcAsyncIOTransport +from .rest import DataStoreServiceRestInterceptor, DataStoreServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[DataStoreServiceTransport]] +_transport_registry["grpc"] = DataStoreServiceGrpcTransport +_transport_registry["grpc_asyncio"] = DataStoreServiceGrpcAsyncIOTransport +_transport_registry["rest"] = DataStoreServiceRestTransport + +__all__ = ( + "DataStoreServiceTransport", + "DataStoreServiceGrpcTransport", + "DataStoreServiceGrpcAsyncIOTransport", + "DataStoreServiceRestTransport", + "DataStoreServiceRestInterceptor", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/transports/base.py new file mode 100644 index 000000000000..ea5eb5fbeaad --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/transports/base.py @@ -0,0 +1,248 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1beta import gapic_version as package_version +from google.cloud.discoveryengine_v1beta.types import data_store as gcd_data_store +from google.cloud.discoveryengine_v1beta.types import data_store +from google.cloud.discoveryengine_v1beta.types import data_store_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class DataStoreServiceTransport(abc.ABC): + """Abstract transport class for DataStoreService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "discoveryengine.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: 
Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. 
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_data_store: gapic_v1.method.wrap_method( + self.create_data_store, + default_timeout=None, + client_info=client_info, + ), + self.get_data_store: gapic_v1.method.wrap_method( + self.get_data_store, + default_timeout=None, + client_info=client_info, + ), + self.list_data_stores: gapic_v1.method.wrap_method( + self.list_data_stores, + default_timeout=None, + client_info=client_info, + ), + self.delete_data_store: gapic_v1.method.wrap_method( + self.delete_data_store, + default_timeout=None, + client_info=client_info, + ), + self.update_data_store: gapic_v1.method.wrap_method( + self.update_data_store, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def create_data_store( + self, + ) -> Callable[ + [data_store_service.CreateDataStoreRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_data_store( + self, + ) -> Callable[ + [data_store_service.GetDataStoreRequest], + Union[data_store.DataStore, Awaitable[data_store.DataStore]], + ]: + raise NotImplementedError() + + @property + def list_data_stores( + self, + ) -> Callable[ + [data_store_service.ListDataStoresRequest], + Union[ + data_store_service.ListDataStoresResponse, + Awaitable[data_store_service.ListDataStoresResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_data_store( + self, + ) -> Callable[ + [data_store_service.DeleteDataStoreRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_data_store( + self, + ) -> Callable[ + [data_store_service.UpdateDataStoreRequest], + Union[gcd_data_store.DataStore, Awaitable[gcd_data_store.DataStore]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("DataStoreServiceTransport",) diff --git 
a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/transports/grpc.py new file mode 100644 index 000000000000..d8b4cf567e3d --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/transports/grpc.py @@ -0,0 +1,447 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.discoveryengine_v1beta.types import data_store as gcd_data_store +from google.cloud.discoveryengine_v1beta.types import data_store +from google.cloud.discoveryengine_v1beta.types import data_store_service + +from .base import DEFAULT_CLIENT_INFO, DataStoreServiceTransport + + +class DataStoreServiceGrpcTransport(DataStoreServiceTransport): + """gRPC backend transport for DataStoreService. 
+ + Service for managing + [DataStore][google.cloud.discoveryengine.v1beta.DataStore] + configuration. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. 
+ channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def create_data_store( + self, + ) -> Callable[ + [data_store_service.CreateDataStoreRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create data store method over gRPC. + + Creates a + [DataStore][google.cloud.discoveryengine.v1beta.DataStore]. + + DataStore is for storing + [Documents][google.cloud.discoveryengine.v1beta.Document]. To + serve these documents for Search, or Recommendation use case, an + [Engine][google.cloud.discoveryengine.v1beta.Engine] needs to be + created separately. + + Returns: + Callable[[~.CreateDataStoreRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_data_store" not in self._stubs: + self._stubs["create_data_store"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.DataStoreService/CreateDataStore", + request_serializer=data_store_service.CreateDataStoreRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_data_store"] + + @property + def get_data_store( + self, + ) -> Callable[[data_store_service.GetDataStoreRequest], data_store.DataStore]: + r"""Return a callable for the get data store method over gRPC. + + Gets a + [DataStore][google.cloud.discoveryengine.v1beta.DataStore]. + + Returns: + Callable[[~.GetDataStoreRequest], + ~.DataStore]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_data_store" not in self._stubs: + self._stubs["get_data_store"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.DataStoreService/GetDataStore", + request_serializer=data_store_service.GetDataStoreRequest.serialize, + response_deserializer=data_store.DataStore.deserialize, + ) + return self._stubs["get_data_store"] + + @property + def list_data_stores( + self, + ) -> Callable[ + [data_store_service.ListDataStoresRequest], + data_store_service.ListDataStoresResponse, + ]: + r"""Return a callable for the list data stores method over gRPC. + + Lists all the + [DataStore][google.cloud.discoveryengine.v1beta.DataStore]s + associated with the project. + + Returns: + Callable[[~.ListDataStoresRequest], + ~.ListDataStoresResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_data_stores" not in self._stubs: + self._stubs["list_data_stores"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.DataStoreService/ListDataStores", + request_serializer=data_store_service.ListDataStoresRequest.serialize, + response_deserializer=data_store_service.ListDataStoresResponse.deserialize, + ) + return self._stubs["list_data_stores"] + + @property + def delete_data_store( + self, + ) -> Callable[ + [data_store_service.DeleteDataStoreRequest], operations_pb2.Operation + ]: + r"""Return a callable for the delete data store method over gRPC. + + Deletes a + [DataStore][google.cloud.discoveryengine.v1beta.DataStore]. + + Returns: + Callable[[~.DeleteDataStoreRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_data_store" not in self._stubs: + self._stubs["delete_data_store"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.DataStoreService/DeleteDataStore", + request_serializer=data_store_service.DeleteDataStoreRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_data_store"] + + @property + def update_data_store( + self, + ) -> Callable[ + [data_store_service.UpdateDataStoreRequest], gcd_data_store.DataStore + ]: + r"""Return a callable for the update data store method over gRPC. + + Updates a + [DataStore][google.cloud.discoveryengine.v1beta.DataStore] + + Returns: + Callable[[~.UpdateDataStoreRequest], + ~.DataStore]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_data_store" not in self._stubs: + self._stubs["update_data_store"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.DataStoreService/UpdateDataStore", + request_serializer=data_store_service.UpdateDataStoreRequest.serialize, + response_deserializer=gcd_data_store.DataStore.deserialize, + ) + return self._stubs["update_data_store"] + + def close(self): + self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("DataStoreServiceGrpcTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..974d71827fbc --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/transports/grpc_asyncio.py @@ -0,0 +1,450 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.discoveryengine_v1beta.types import data_store as gcd_data_store +from google.cloud.discoveryengine_v1beta.types import data_store +from google.cloud.discoveryengine_v1beta.types import data_store_service + +from .base import DEFAULT_CLIENT_INFO, DataStoreServiceTransport +from .grpc import DataStoreServiceGrpcTransport + + +class DataStoreServiceGrpcAsyncIOTransport(DataStoreServiceTransport): + """gRPC AsyncIO backend transport for DataStoreService. + + Service for managing + [DataStore][google.cloud.discoveryengine.v1beta.DataStore] + configuration. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. 
If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials.
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. 
+ """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_data_store( + self, + ) -> Callable[ + [data_store_service.CreateDataStoreRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create data store method over gRPC. + + Creates a + [DataStore][google.cloud.discoveryengine.v1beta.DataStore]. + + DataStore is for storing + [Documents][google.cloud.discoveryengine.v1beta.Document]. To + serve these documents for Search, or Recommendation use case, an + [Engine][google.cloud.discoveryengine.v1beta.Engine] needs to be + created separately. + + Returns: + Callable[[~.CreateDataStoreRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_data_store" not in self._stubs: + self._stubs["create_data_store"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.DataStoreService/CreateDataStore", + request_serializer=data_store_service.CreateDataStoreRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_data_store"] + + @property + def get_data_store( + self, + ) -> Callable[ + [data_store_service.GetDataStoreRequest], Awaitable[data_store.DataStore] + ]: + r"""Return a callable for the get data store method over gRPC. + + Gets a + [DataStore][google.cloud.discoveryengine.v1beta.DataStore]. + + Returns: + Callable[[~.GetDataStoreRequest], + Awaitable[~.DataStore]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_data_store" not in self._stubs: + self._stubs["get_data_store"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.DataStoreService/GetDataStore", + request_serializer=data_store_service.GetDataStoreRequest.serialize, + response_deserializer=data_store.DataStore.deserialize, + ) + return self._stubs["get_data_store"] + + @property + def list_data_stores( + self, + ) -> Callable[ + [data_store_service.ListDataStoresRequest], + Awaitable[data_store_service.ListDataStoresResponse], + ]: + r"""Return a callable for the list data stores method over gRPC. + + Lists all the + [DataStore][google.cloud.discoveryengine.v1beta.DataStore]s + associated with the project. + + Returns: + Callable[[~.ListDataStoresRequest], + Awaitable[~.ListDataStoresResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_data_stores" not in self._stubs: + self._stubs["list_data_stores"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.DataStoreService/ListDataStores", + request_serializer=data_store_service.ListDataStoresRequest.serialize, + response_deserializer=data_store_service.ListDataStoresResponse.deserialize, + ) + return self._stubs["list_data_stores"] + + @property + def delete_data_store( + self, + ) -> Callable[ + [data_store_service.DeleteDataStoreRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete data store method over gRPC. + + Deletes a + [DataStore][google.cloud.discoveryengine.v1beta.DataStore]. 
+ + Returns: + Callable[[~.DeleteDataStoreRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_data_store" not in self._stubs: + self._stubs["delete_data_store"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.DataStoreService/DeleteDataStore", + request_serializer=data_store_service.DeleteDataStoreRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_data_store"] + + @property + def update_data_store( + self, + ) -> Callable[ + [data_store_service.UpdateDataStoreRequest], Awaitable[gcd_data_store.DataStore] + ]: + r"""Return a callable for the update data store method over gRPC. + + Updates a + [DataStore][google.cloud.discoveryengine.v1beta.DataStore] + + Returns: + Callable[[~.UpdateDataStoreRequest], + Awaitable[~.DataStore]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_data_store" not in self._stubs: + self._stubs["update_data_store"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.DataStoreService/UpdateDataStore", + request_serializer=data_store_service.UpdateDataStoreRequest.serialize, + response_deserializer=gcd_data_store.DataStore.deserialize, + ) + return self._stubs["update_data_store"] + + def close(self): + return self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("DataStoreServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/transports/rest.py new file mode 100644 index 000000000000..5d13496d5649 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/transports/rest.py @@ -0,0 +1,1320 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.discoveryengine_v1beta.types import data_store as gcd_data_store +from google.cloud.discoveryengine_v1beta.types import data_store +from google.cloud.discoveryengine_v1beta.types import data_store_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import DataStoreServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class DataStoreServiceRestInterceptor: + """Interceptor for DataStoreService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the DataStoreServiceRestTransport. + + .. code-block:: python + class MyCustomDataStoreServiceInterceptor(DataStoreServiceRestInterceptor): + def pre_create_data_store(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_data_store(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_data_store(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_data_store(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_data_store(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_data_store(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_data_stores(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_data_stores(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_data_store(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_data_store(self, response): + logging.log(f"Received response: {response}") + return response + + transport = DataStoreServiceRestTransport(interceptor=MyCustomDataStoreServiceInterceptor()) + client = DataStoreServiceClient(transport=transport) + + + """ + + def pre_create_data_store( + self, + request: data_store_service.CreateDataStoreRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[data_store_service.CreateDataStoreRequest, Sequence[Tuple[str, str]]]: + 
"""Pre-rpc interceptor for create_data_store + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataStoreService server. + """ + return request, metadata + + def post_create_data_store( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_data_store + + Override in a subclass to manipulate the response + after it is returned by the DataStoreService server but before + it is returned to user code. + """ + return response + + def pre_delete_data_store( + self, + request: data_store_service.DeleteDataStoreRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[data_store_service.DeleteDataStoreRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_data_store + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataStoreService server. + """ + return request, metadata + + def post_delete_data_store( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_data_store + + Override in a subclass to manipulate the response + after it is returned by the DataStoreService server but before + it is returned to user code. + """ + return response + + def pre_get_data_store( + self, + request: data_store_service.GetDataStoreRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[data_store_service.GetDataStoreRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_data_store + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataStoreService server. + """ + return request, metadata + + def post_get_data_store( + self, response: data_store.DataStore + ) -> data_store.DataStore: + """Post-rpc interceptor for get_data_store + + Override in a subclass to manipulate the response + after it is returned by the DataStoreService server but before + it is returned to user code. 
+ """ + return response + + def pre_list_data_stores( + self, + request: data_store_service.ListDataStoresRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[data_store_service.ListDataStoresRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_data_stores + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataStoreService server. + """ + return request, metadata + + def post_list_data_stores( + self, response: data_store_service.ListDataStoresResponse + ) -> data_store_service.ListDataStoresResponse: + """Post-rpc interceptor for list_data_stores + + Override in a subclass to manipulate the response + after it is returned by the DataStoreService server but before + it is returned to user code. + """ + return response + + def pre_update_data_store( + self, + request: data_store_service.UpdateDataStoreRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[data_store_service.UpdateDataStoreRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_data_store + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataStoreService server. + """ + return request, metadata + + def post_update_data_store( + self, response: gcd_data_store.DataStore + ) -> gcd_data_store.DataStore: + """Post-rpc interceptor for update_data_store + + Override in a subclass to manipulate the response + after it is returned by the DataStoreService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataStoreService server. 
+ """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the DataStoreService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataStoreService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the DataStoreService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class DataStoreServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: DataStoreServiceRestInterceptor + + +class DataStoreServiceRestTransport(DataStoreServiceTransport): + """REST backend transport for DataStoreService. + + Service for managing + [DataStore][google.cloud.discoveryengine.v1beta.DataStore] + configuration. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[DataStoreServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint.  Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or DataStoreServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Only create a new client if we do not already have one.
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/engines/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, + { + "method": "get", + "uri": 
"/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/engines/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1beta", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. 
+ return self._operations_client + + class _CreateDataStore(DataStoreServiceRestStub): + def __hash__(self): + return hash("CreateDataStore") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "dataStoreId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: data_store_service.CreateDataStoreRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create data store method over HTTP. + + Args: + request (~.data_store_service.CreateDataStoreRequest): + The request object. Request for + [DataStoreService.CreateDataStore][google.cloud.discoveryengine.v1beta.DataStoreService.CreateDataStore] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*}/dataStores", + "body": "data_store", + }, + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*/collections/*}/dataStores", + "body": "data_store", + }, + ] + request, metadata = self._interceptor.pre_create_data_store( + request, metadata + ) + pb_request = data_store_service.CreateDataStoreRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_data_store(resp) + return resp + + class _DeleteDataStore(DataStoreServiceRestStub): + def __hash__(self): + return hash("DeleteDataStore") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: data_store_service.DeleteDataStoreRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete data store method over HTTP. + + Args: + request (~.data_store_service.DeleteDataStoreRequest): + The request object. Request message for + [DataStoreService.DeleteDataStore][google.cloud.discoveryengine.v1beta.DataStoreService.DeleteDataStore] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*}", + }, + { + "method": "delete", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_data_store( + request, metadata + ) + pb_request = data_store_service.DeleteDataStoreRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
            # Map HTTP 4xx/5xx onto the matching
            # core_exceptions.GoogleAPICallError subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = operations_pb2.Operation()
            json_format.Parse(response.content, resp, ignore_unknown_fields=True)
            # Let the interceptor post-process the parsed response.
            resp = self._interceptor.post_delete_data_store(resp)
            return resp

    class _GetDataStore(DataStoreServiceRestStub):
        # REST stub for DataStoreService.GetDataStore.

        def __hash__(self):
            return hash("GetDataStore")

        # Query-param defaults for proto-required fields; empty because this
        # RPC has no required query parameters.
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            # Return defaults for required fields absent from message_dict.
            return {
                k: v
                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
                if k not in message_dict
            }

        def __call__(
            self,
            request: data_store_service.GetDataStoreRequest,
            *,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Optional[float] = None,
            metadata: Sequence[Tuple[str, str]] = (),
        ) -> data_store.DataStore:
            r"""Call the get data store method over HTTP.

            Args:
                request (~.data_store_service.GetDataStoreRequest):
                    The request object. Request message for
                    [DataStoreService.GetDataStore][google.cloud.discoveryengine.v1beta.DataStoreService.GetDataStore]
                    method.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.data_store.DataStore:
                    DataStore captures global settings
                and configs at the DataStore level.

            """

            # Candidate routes; path_template.transcode selects the one whose
            # URI template matches the request's resource name.
            http_options: List[Dict[str, str]] = [
                {
                    "method": "get",
                    "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*}",
                },
                {
                    "method": "get",
                    "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*}",
                },
            ]
            request, metadata = self._interceptor.pre_get_data_store(request, metadata)
            pb_request = data_store_service.GetDataStoreRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request["uri"]
            method = transcoded_request["method"]

            # Jsonify the query params (default-valued fields omitted,
            # enums encoded as integers).
            query_params = json.loads(
                json_format.MessageToJson(
                    transcoded_request["query_params"],
                    including_default_value_fields=False,
                    use_integers_for_enums=True,
                )
            )
            query_params.update(self._get_unset_required_fields(query_params))

            # Request JSON responses with integer enum encoding.
            query_params["$alt"] = "json;enum-encoding=int"

            # Send the request
            headers = dict(metadata)
            headers["Content-Type"] = "application/json"
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            # Map HTTP 4xx/5xx onto the matching
            # core_exceptions.GoogleAPICallError subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response; JSON is parsed into the wrapped pb message.
            resp = data_store.DataStore()
            pb_resp = data_store.DataStore.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_get_data_store(resp)
            return resp

    class _ListDataStores(DataStoreServiceRestStub):
        # REST stub for DataStoreService.ListDataStores.

        def __hash__(self):
            return hash("ListDataStores")

        # Query-param defaults for proto-required fields; empty because this
        # RPC has no required query parameters.
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            # Return defaults for required fields absent from message_dict.
            return {
                k: v
                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
                if k not in message_dict
            }

        def __call__(
            self,
            request: data_store_service.ListDataStoresRequest,
            *,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Optional[float] = None,
            metadata: Sequence[Tuple[str, str]] = (),
        ) -> data_store_service.ListDataStoresResponse:
            r"""Call the list data stores method over HTTP.

            Args:
                request (~.data_store_service.ListDataStoresRequest):
                    The request object. Request message for
                    [DataStoreService.ListDataStores][google.cloud.discoveryengine.v1beta.DataStoreService.ListDataStores]
                    method.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.data_store_service.ListDataStoresResponse:
                    Response message for
                [DataStoreService.ListDataStores][google.cloud.discoveryengine.v1beta.DataStoreService.ListDataStores]
                method.

            """

            # Candidate routes; path_template.transcode selects the one whose
            # URI template matches the request's parent resource.
            http_options: List[Dict[str, str]] = [
                {
                    "method": "get",
                    "uri": "/v1beta/{parent=projects/*/locations/*}/dataStores",
                },
                {
                    "method": "get",
                    "uri": "/v1beta/{parent=projects/*/locations/*/collections/*}/dataStores",
                },
            ]
            request, metadata = self._interceptor.pre_list_data_stores(
                request, metadata
            )
            pb_request = data_store_service.ListDataStoresRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request["uri"]
            method = transcoded_request["method"]

            # Jsonify the query params (default-valued fields omitted,
            # enums encoded as integers).
            query_params = json.loads(
                json_format.MessageToJson(
                    transcoded_request["query_params"],
                    including_default_value_fields=False,
                    use_integers_for_enums=True,
                )
            )
            query_params.update(self._get_unset_required_fields(query_params))

            # Request JSON responses with integer enum encoding.
            query_params["$alt"] = "json;enum-encoding=int"

            # Send the request
            headers = dict(metadata)
            headers["Content-Type"] = "application/json"
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            # Map HTTP 4xx/5xx onto the matching
            # core_exceptions.GoogleAPICallError subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response; JSON is parsed into the wrapped pb message.
            resp = data_store_service.ListDataStoresResponse()
            pb_resp = data_store_service.ListDataStoresResponse.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_list_data_stores(resp)
            return resp

    class _UpdateDataStore(DataStoreServiceRestStub):
        # REST stub for DataStoreService.UpdateDataStore.

        def __hash__(self):
            return hash("UpdateDataStore")

        # Query-param defaults for proto-required fields; empty because this
        # RPC has no required query parameters.
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            # Return defaults for required fields absent from message_dict.
            return {
                k: v
                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
                if k not in message_dict
            }

        def __call__(
            self,
            request: data_store_service.UpdateDataStoreRequest,
            *,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Optional[float] = None,
            metadata: Sequence[Tuple[str, str]] = (),
        ) -> gcd_data_store.DataStore:
            r"""Call the update data store method over HTTP.

            Args:
                request (~.data_store_service.UpdateDataStoreRequest):
                    The request object. Request message for
                    [DataStoreService.UpdateDataStore][google.cloud.discoveryengine.v1beta.DataStoreService.UpdateDataStore]
                    method.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.gcd_data_store.DataStore:
                    DataStore captures global settings
                and configs at the DataStore level.

            """

            # Candidate PATCH routes; "body": "data_store" means the
            # data_store field of the request is sent as the HTTP body.
            http_options: List[Dict[str, str]] = [
                {
                    "method": "patch",
                    "uri": "/v1beta/{data_store.name=projects/*/locations/*/dataStores/*}",
                    "body": "data_store",
                },
                {
                    "method": "patch",
                    "uri": "/v1beta/{data_store.name=projects/*/locations/*/collections/*/dataStores/*}",
                    "body": "data_store",
                },
            ]
            request, metadata = self._interceptor.pre_update_data_store(
                request, metadata
            )
            pb_request = data_store_service.UpdateDataStoreRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            # Jsonify the request body

            body = json_format.MessageToJson(
                transcoded_request["body"],
                including_default_value_fields=False,
                use_integers_for_enums=True,
            )
            uri = transcoded_request["uri"]
            method = transcoded_request["method"]

            # Jsonify the query params (default-valued fields omitted,
            # enums encoded as integers).
            query_params = json.loads(
                json_format.MessageToJson(
                    transcoded_request["query_params"],
                    including_default_value_fields=False,
                    use_integers_for_enums=True,
                )
            )
            query_params.update(self._get_unset_required_fields(query_params))

            # Request JSON responses with integer enum encoding.
            query_params["$alt"] = "json;enum-encoding=int"

            # Send the request
            headers = dict(metadata)
            headers["Content-Type"] = "application/json"
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                data=body,
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            # Map HTTP 4xx/5xx onto the matching
            # core_exceptions.GoogleAPICallError subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response; JSON is parsed into the wrapped pb message.
            resp = gcd_data_store.DataStore()
            pb_resp = gcd_data_store.DataStore.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_update_data_store(resp)
            return resp

    # Each property below exposes one RPC as a fresh callable stub bound to
    # this transport's session, host and interceptor.

    @property
    def create_data_store(
        self,
    ) -> Callable[
        [data_store_service.CreateDataStoreRequest], operations_pb2.Operation
    ]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._CreateDataStore(self._session, self._host, self._interceptor)  # type: ignore

    @property
    def delete_data_store(
        self,
    ) -> Callable[
        [data_store_service.DeleteDataStoreRequest], operations_pb2.Operation
    ]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._DeleteDataStore(self._session, self._host, self._interceptor)  # type: ignore

    @property
    def get_data_store(
        self,
    ) -> Callable[[data_store_service.GetDataStoreRequest], data_store.DataStore]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._GetDataStore(self._session, self._host, self._interceptor)  # type: ignore

    @property
    def list_data_stores(
        self,
    ) -> Callable[
        [data_store_service.ListDataStoresRequest],
        data_store_service.ListDataStoresResponse,
    ]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._ListDataStores(self._session, self._host, self._interceptor)  # type: ignore

    @property
    def update_data_store(
        self,
    ) -> Callable[
        [data_store_service.UpdateDataStoreRequest], gcd_data_store.DataStore
    ]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._UpdateDataStore(self._session, self._host, self._interceptor)  # type: ignore

    @property
    def get_operation(self):
        # Operations-mixin accessor (no typed Callable annotation, matching
        # the generated mixin style).
        return self._GetOperation(self._session, self._host, self._interceptor)  # type: ignore

    class _GetOperation(DataStoreServiceRestStub):
        # REST stub for the google.longrunning.Operations.GetOperation mixin.

        def __call__(
            self,
            request: operations_pb2.GetOperationRequest,
            *,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Optional[float] = None,
            metadata: Sequence[Tuple[str, str]] = (),
        ) -> operations_pb2.Operation:
            r"""Call the get operation method over HTTP.

            Args:
                request (operations_pb2.GetOperationRequest):
                    The request object for GetOperation method.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                operations_pb2.Operation: Response from GetOperation method.
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/engines/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + 
query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(DataStoreServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/engines/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = 
json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("DataStoreServiceRestTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/rest.py index 28a5a482e525..c8b2b8e6ba1c 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/rest.py @@ -440,6 +440,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, { "method": "get", "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", @@ -494,6 +498,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: }, ], 
"google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, { "method": "get", "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", @@ -1340,6 +1348,10 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, { "method": "get", "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", @@ -1454,6 +1466,10 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, { "method": "get", "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/__init__.py new file mode 100644 index 000000000000..642a2e63fcb4 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import EngineServiceAsyncClient +from .client import EngineServiceClient + +__all__ = ( + "EngineServiceClient", + "EngineServiceAsyncClient", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/async_client.py new file mode 100644 index 000000000000..c448404d8f40 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/async_client.py @@ -0,0 +1,1056 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.discoveryengine_v1beta.services.engine_service import pagers +from google.cloud.discoveryengine_v1beta.types import common +from google.cloud.discoveryengine_v1beta.types import engine +from google.cloud.discoveryengine_v1beta.types import engine as gcd_engine +from google.cloud.discoveryengine_v1beta.types import engine_service + +from .client import EngineServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, EngineServiceTransport +from .transports.grpc_asyncio import EngineServiceGrpcAsyncIOTransport + + +class EngineServiceAsyncClient: + """Service for managing + [Engine][google.cloud.discoveryengine.v1beta.Engine] configuration. 
+ """ + + _client: EngineServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = EngineServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = EngineServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = EngineServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = EngineServiceClient._DEFAULT_UNIVERSE + + collection_path = staticmethod(EngineServiceClient.collection_path) + parse_collection_path = staticmethod(EngineServiceClient.parse_collection_path) + engine_path = staticmethod(EngineServiceClient.engine_path) + parse_engine_path = staticmethod(EngineServiceClient.parse_engine_path) + common_billing_account_path = staticmethod( + EngineServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + EngineServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(EngineServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + EngineServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + EngineServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + EngineServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(EngineServiceClient.common_project_path) + parse_common_project_path = staticmethod( + EngineServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(EngineServiceClient.common_location_path) + parse_common_location_path = staticmethod( + EngineServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. 
+ kwargs: Additional arguments to pass to the constructor. + + Returns: + EngineServiceAsyncClient: The constructed client. + """ + return EngineServiceClient.from_service_account_info.__func__(EngineServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + EngineServiceAsyncClient: The constructed client. + """ + return EngineServiceClient.from_service_account_file.__func__(EngineServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. 
+ + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return EngineServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> EngineServiceTransport: + """Returns the transport used by the client instance. + + Returns: + EngineServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = functools.partial( + type(EngineServiceClient).get_transport_class, type(EngineServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, EngineServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the engine service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Union[str, ~.EngineServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client = EngineServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_engine( + self, + request: Optional[Union[engine_service.CreateEngineRequest, dict]] = None, + *, + parent: Optional[str] = None, + engine: Optional[gcd_engine.Engine] = None, + engine_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a [Engine][google.cloud.discoveryengine.v1beta.Engine]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_create_engine(): + # Create a client + client = discoveryengine_v1beta.EngineServiceAsyncClient() + + # Initialize request argument(s) + engine = discoveryengine_v1beta.Engine() + engine.display_name = "display_name_value" + engine.solution_type = "SOLUTION_TYPE_CHAT" + + request = discoveryengine_v1beta.CreateEngineRequest( + parent="parent_value", + engine=engine, + engine_id="engine_id_value", + ) + + # Make the request + operation = client.create_engine(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.CreateEngineRequest, dict]]): + The request object. 
Request for + [EngineService.CreateEngine][google.cloud.discoveryengine.v1beta.EngineService.CreateEngine] + method. + parent (:class:`str`): + Required. The parent resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + engine (:class:`google.cloud.discoveryengine_v1beta.types.Engine`): + Required. The + [Engine][google.cloud.discoveryengine.v1beta.Engine] to + create. + + This corresponds to the ``engine`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + engine_id (:class:`str`): + Required. The ID to use for the + [Engine][google.cloud.discoveryengine.v1beta.Engine], + which will become the final component of the + [Engine][google.cloud.discoveryengine.v1beta.Engine]'s + resource name. + + This field must conform to + `RFC-1034 `__ + standard with a length limit of 63 characters. + Otherwise, an INVALID_ARGUMENT error is returned. + + This corresponds to the ``engine_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1beta.types.Engine` Metadata that describes the training and serving parameters of an + [Engine][google.cloud.discoveryengine.v1beta.Engine]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, engine, engine_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = engine_service.CreateEngineRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if engine is not None: + request.engine = engine + if engine_id is not None: + request.engine_id = engine_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_engine, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcd_engine.Engine, + metadata_type=engine_service.CreateEngineMetadata, + ) + + # Done; return the response. + return response + + async def delete_engine( + self, + request: Optional[Union[engine_service.DeleteEngineRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a [Engine][google.cloud.discoveryengine.v1beta.Engine]. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_delete_engine(): + # Create a client + client = discoveryengine_v1beta.EngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.DeleteEngineRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_engine(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.DeleteEngineRequest, dict]]): + The request object. Request message for + [EngineService.DeleteEngine][google.cloud.discoveryengine.v1beta.EngineService.DeleteEngine] + method. + name (:class:`str`): + Required. Full resource name of + [Engine][google.cloud.discoveryengine.v1beta.Engine], + such as + ``projects/{project}/locations/{location}/collections/{collection_id}/engines/{engine_id}``. + + If the caller does not have permission to delete the + [Engine][google.cloud.discoveryengine.v1beta.Engine], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the + [Engine][google.cloud.discoveryengine.v1beta.Engine] to + delete does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = engine_service.DeleteEngineRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_engine, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=engine_service.DeleteEngineMetadata, + ) + + # Done; return the response. + return response + + async def update_engine( + self, + request: Optional[Union[engine_service.UpdateEngineRequest, dict]] = None, + *, + engine: Optional[gcd_engine.Engine] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_engine.Engine: + r"""Updates an [Engine][google.cloud.discoveryengine.v1beta.Engine] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_update_engine(): + # Create a client + client = discoveryengine_v1beta.EngineServiceAsyncClient() + + # Initialize request argument(s) + engine = discoveryengine_v1beta.Engine() + engine.display_name = "display_name_value" + engine.solution_type = "SOLUTION_TYPE_CHAT" + + request = discoveryengine_v1beta.UpdateEngineRequest( + engine=engine, + ) + + # Make the request + response = await client.update_engine(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.UpdateEngineRequest, dict]]): + The request object. Request message for + [EngineService.UpdateEngine][google.cloud.discoveryengine.v1beta.EngineService.UpdateEngine] + method. 
+ engine (:class:`google.cloud.discoveryengine_v1beta.types.Engine`): + Required. The + [Engine][google.cloud.discoveryengine.v1beta.Engine] to + update. + + If the caller does not have permission to update the + [Engine][google.cloud.discoveryengine.v1beta.Engine], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the + [Engine][google.cloud.discoveryengine.v1beta.Engine] to + update does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``engine`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Indicates which fields in the provided + [Engine][google.cloud.discoveryengine.v1beta.Engine] to + update. + + If an unsupported or unknown field is provided, an + INVALID_ARGUMENT error is returned. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.types.Engine: + Metadata that describes the training and serving parameters of an + [Engine][google.cloud.discoveryengine.v1beta.Engine]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([engine, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = engine_service.UpdateEngineRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if engine is not None: + request.engine = engine + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_engine, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("engine.name", request.engine.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_engine( + self, + request: Optional[Union[engine_service.GetEngineRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> engine.Engine: + r"""Gets a [Engine][google.cloud.discoveryengine.v1beta.Engine]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_get_engine(): + # Create a client + client = discoveryengine_v1beta.EngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.GetEngineRequest( + name="name_value", + ) + + # Make the request + response = await client.get_engine(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.GetEngineRequest, dict]]): + The request object. Request message for + [EngineService.GetEngine][google.cloud.discoveryengine.v1beta.EngineService.GetEngine] + method. + name (:class:`str`): + Required. Full resource name of + [Engine][google.cloud.discoveryengine.v1beta.Engine], + such as + ``projects/{project}/locations/{location}/collections/{collection_id}/engines/{engine_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.types.Engine: + Metadata that describes the training and serving parameters of an + [Engine][google.cloud.discoveryengine.v1beta.Engine]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = engine_service.GetEngineRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_engine, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_engines( + self, + request: Optional[Union[engine_service.ListEnginesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEnginesAsyncPager: + r"""Lists all the + [Engine][google.cloud.discoveryengine.v1beta.Engine]s associated + with the project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_list_engines(): + # Create a client + client = discoveryengine_v1beta.EngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.ListEnginesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_engines(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.ListEnginesRequest, dict]]): + The request object. Request message for + [EngineService.ListEngines][google.cloud.discoveryengine.v1beta.EngineService.ListEngines] + method. + parent (:class:`str`): + Required. The parent resource name, such as + ``projects/{project}/locations/{location}/collections/{collection_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.services.engine_service.pagers.ListEnginesAsyncPager: + Response message for + [EngineService.ListEngines][google.cloud.discoveryengine.v1beta.EngineService.ListEngines] + method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = engine_service.ListEnginesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_engines, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListEnginesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "EngineServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("EngineServiceAsyncClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/client.py new file mode 100644 index 000000000000..4f5257150996 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/client.py @@ -0,0 +1,1498 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.discoveryengine_v1beta.services.engine_service import pagers +from google.cloud.discoveryengine_v1beta.types 
import common +from google.cloud.discoveryengine_v1beta.types import engine +from google.cloud.discoveryengine_v1beta.types import engine as gcd_engine +from google.cloud.discoveryengine_v1beta.types import engine_service + +from .transports.base import DEFAULT_CLIENT_INFO, EngineServiceTransport +from .transports.grpc import EngineServiceGrpcTransport +from .transports.grpc_asyncio import EngineServiceGrpcAsyncIOTransport +from .transports.rest import EngineServiceRestTransport + + +class EngineServiceClientMeta(type): + """Metaclass for the EngineService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[EngineServiceTransport]] + _transport_registry["grpc"] = EngineServiceGrpcTransport + _transport_registry["grpc_asyncio"] = EngineServiceGrpcAsyncIOTransport + _transport_registry["rest"] = EngineServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[EngineServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class EngineServiceClient(metaclass=EngineServiceClientMeta): + """Service for managing + [Engine][google.cloud.discoveryengine.v1beta.Engine] configuration. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. 
+ + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "discoveryengine.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "discoveryengine.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + EngineServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. 
+ kwargs: Additional arguments to pass to the constructor. + + Returns: + EngineServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> EngineServiceTransport: + """Returns the transport used by the client instance. + + Returns: + EngineServiceTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def collection_path( + project: str, + location: str, + collection: str, + ) -> str: + """Returns a fully-qualified collection string.""" + return ( + "projects/{project}/locations/{location}/collections/{collection}".format( + project=project, + location=location, + collection=collection, + ) + ) + + @staticmethod + def parse_collection_path(path: str) -> Dict[str, str]: + """Parses a collection path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/collections/(?P<collection>.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def engine_path( + project: str, + location: str, + collection: str, + engine: str, + ) -> str: + """Returns a fully-qualified engine string.""" + return "projects/{project}/locations/{location}/collections/{collection}/engines/{engine}".format( + project=project, + location=location, + collection=collection, + engine=engine, + ) + + @staticmethod + def parse_engine_path(path: str) -> Dict[str, str]: + """Parses a engine path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/collections/(?P<collection>.+?)/engines/(?P<engine>.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + 
billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P<folder>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P<organization>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m 
else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". 
+ + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = EngineServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = EngineServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = EngineServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = EngineServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. 
+ + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = EngineServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or EngineServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, EngineServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the engine service client. 
+ + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, EngineServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = EngineServiceClient._read_environment_variables() + self._client_cert_source = EngineServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = EngineServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, EngineServiceTransport) + if transport_provided: + # transport is a EngineServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." 
+ ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(EngineServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or EngineServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(cast(str, transport)) + self._transport = Transport( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def create_engine( + self, + request: Optional[Union[engine_service.CreateEngineRequest, dict]] = None, + *, + parent: Optional[str] = None, + engine: Optional[gcd_engine.Engine] = None, + engine_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a [Engine][google.cloud.discoveryengine.v1beta.Engine]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_create_engine(): + # Create a client + client = discoveryengine_v1beta.EngineServiceClient() + + # Initialize request argument(s) + engine = discoveryengine_v1beta.Engine() + engine.display_name = "display_name_value" + engine.solution_type = "SOLUTION_TYPE_CHAT" + + request = discoveryengine_v1beta.CreateEngineRequest( + parent="parent_value", + engine=engine, + engine_id="engine_id_value", + ) + + # Make the request + operation = client.create_engine(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.CreateEngineRequest, dict]): + The request object. Request for + [EngineService.CreateEngine][google.cloud.discoveryengine.v1beta.EngineService.CreateEngine] + method. + parent (str): + Required. The parent resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + engine (google.cloud.discoveryengine_v1beta.types.Engine): + Required. The + [Engine][google.cloud.discoveryengine.v1beta.Engine] to + create. + + This corresponds to the ``engine`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + engine_id (str): + Required. The ID to use for the + [Engine][google.cloud.discoveryengine.v1beta.Engine], + which will become the final component of the + [Engine][google.cloud.discoveryengine.v1beta.Engine]'s + resource name. + + This field must conform to + `RFC-1034 <https://tools.ietf.org/html/rfc1034>`__ + standard with a length limit of 63 characters. + Otherwise, an INVALID_ARGUMENT error is returned. 
+ + This corresponds to the ``engine_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1beta.types.Engine` Metadata that describes the training and serving parameters of an + [Engine][google.cloud.discoveryengine.v1beta.Engine]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, engine, engine_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a engine_service.CreateEngineRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, engine_service.CreateEngineRequest): + request = engine_service.CreateEngineRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if engine is not None: + request.engine = engine + if engine_id is not None: + request.engine_id = engine_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_engine] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcd_engine.Engine, + metadata_type=engine_service.CreateEngineMetadata, + ) + + # Done; return the response. + return response + + def delete_engine( + self, + request: Optional[Union[engine_service.DeleteEngineRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a [Engine][google.cloud.discoveryengine.v1beta.Engine]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_delete_engine(): + # Create a client + client = discoveryengine_v1beta.EngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.DeleteEngineRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_engine(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.DeleteEngineRequest, dict]): + The request object. 
Request message for + [EngineService.DeleteEngine][google.cloud.discoveryengine.v1beta.EngineService.DeleteEngine] + method. + name (str): + Required. Full resource name of + [Engine][google.cloud.discoveryengine.v1beta.Engine], + such as + ``projects/{project}/locations/{location}/collections/{collection_id}/engines/{engine_id}``. + + If the caller does not have permission to delete the + [Engine][google.cloud.discoveryengine.v1beta.Engine], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the + [Engine][google.cloud.discoveryengine.v1beta.Engine] to + delete does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a engine_service.DeleteEngineRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, engine_service.DeleteEngineRequest): + request = engine_service.DeleteEngineRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_engine] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=engine_service.DeleteEngineMetadata, + ) + + # Done; return the response. + return response + + def update_engine( + self, + request: Optional[Union[engine_service.UpdateEngineRequest, dict]] = None, + *, + engine: Optional[gcd_engine.Engine] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_engine.Engine: + r"""Updates an [Engine][google.cloud.discoveryengine.v1beta.Engine] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_update_engine(): + # Create a client + client = discoveryengine_v1beta.EngineServiceClient() + + # Initialize request argument(s) + engine = discoveryengine_v1beta.Engine() + engine.display_name = "display_name_value" + engine.solution_type = "SOLUTION_TYPE_CHAT" + + request = discoveryengine_v1beta.UpdateEngineRequest( + engine=engine, + ) + + # Make the request + response = client.update_engine(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.UpdateEngineRequest, dict]): + The request object. Request message for + [EngineService.UpdateEngine][google.cloud.discoveryengine.v1beta.EngineService.UpdateEngine] + method. + engine (google.cloud.discoveryengine_v1beta.types.Engine): + Required. The + [Engine][google.cloud.discoveryengine.v1beta.Engine] to + update. + + If the caller does not have permission to update the + [Engine][google.cloud.discoveryengine.v1beta.Engine], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the + [Engine][google.cloud.discoveryengine.v1beta.Engine] to + update does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``engine`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Indicates which fields in the provided + [Engine][google.cloud.discoveryengine.v1beta.Engine] to + update. + + If an unsupported or unknown field is provided, an + INVALID_ARGUMENT error is returned. 
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.types.Engine: + Metadata that describes the training and serving parameters of an + [Engine][google.cloud.discoveryengine.v1beta.Engine]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([engine, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a engine_service.UpdateEngineRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, engine_service.UpdateEngineRequest): + request = engine_service.UpdateEngineRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if engine is not None: + request.engine = engine + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_engine] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("engine.name", request.engine.name),) + ), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_engine( + self, + request: Optional[Union[engine_service.GetEngineRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> engine.Engine: + r"""Gets a [Engine][google.cloud.discoveryengine.v1beta.Engine]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_get_engine(): + # Create a client + client = discoveryengine_v1beta.EngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.GetEngineRequest( + name="name_value", + ) + + # Make the request + response = client.get_engine(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.GetEngineRequest, dict]): + The request object. Request message for + [EngineService.GetEngine][google.cloud.discoveryengine.v1beta.EngineService.GetEngine] + method. + name (str): + Required. Full resource name of + [Engine][google.cloud.discoveryengine.v1beta.Engine], + such as + ``projects/{project}/locations/{location}/collections/{collection_id}/engines/{engine_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.types.Engine: + Metadata that describes the training and serving parameters of an + [Engine][google.cloud.discoveryengine.v1beta.Engine]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a engine_service.GetEngineRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, engine_service.GetEngineRequest): + request = engine_service.GetEngineRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_engine] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_engines( + self, + request: Optional[Union[engine_service.ListEnginesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEnginesPager: + r"""Lists all the + [Engine][google.cloud.discoveryengine.v1beta.Engine]s associated + with the project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_list_engines(): + # Create a client + client = discoveryengine_v1beta.EngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.ListEnginesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_engines(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.ListEnginesRequest, dict]): + The request object. Request message for + [EngineService.ListEngines][google.cloud.discoveryengine.v1beta.EngineService.ListEngines] + method. + parent (str): + Required. The parent resource name, such as + ``projects/{project}/locations/{location}/collections/{collection_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.services.engine_service.pagers.ListEnginesPager: + Response message for + [EngineService.ListEngines][google.cloud.discoveryengine.v1beta.EngineService.ListEngines] + method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a engine_service.ListEnginesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, engine_service.ListEnginesRequest): + request = engine_service.ListEnginesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_engines] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListEnginesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "EngineServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("EngineServiceClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/pagers.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/pagers.py new file mode 100644 index 000000000000..6ed47051f1e3 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.discoveryengine_v1beta.types import engine, engine_service + + +class ListEnginesPager: + """A pager for iterating through ``list_engines`` requests. + + This class thinly wraps an initial + :class:`google.cloud.discoveryengine_v1beta.types.ListEnginesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``engines`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListEngines`` requests and continue to iterate + through the ``engines`` field on the + corresponding responses. + + All the usual :class:`google.cloud.discoveryengine_v1beta.types.ListEnginesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., engine_service.ListEnginesResponse], + request: engine_service.ListEnginesRequest, + response: engine_service.ListEnginesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.discoveryengine_v1beta.types.ListEnginesRequest): + The initial request object. + response (google.cloud.discoveryengine_v1beta.types.ListEnginesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = engine_service.ListEnginesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[engine_service.ListEnginesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[engine.Engine]: + for page in self.pages: + yield from page.engines + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListEnginesAsyncPager: + """A pager for iterating through ``list_engines`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.discoveryengine_v1beta.types.ListEnginesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``engines`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListEngines`` requests and continue to iterate + through the ``engines`` field on the + corresponding responses. + + All the usual :class:`google.cloud.discoveryengine_v1beta.types.ListEnginesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[engine_service.ListEnginesResponse]], + request: engine_service.ListEnginesRequest, + response: engine_service.ListEnginesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.discoveryengine_v1beta.types.ListEnginesRequest): + The initial request object. + response (google.cloud.discoveryengine_v1beta.types.ListEnginesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = engine_service.ListEnginesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[engine_service.ListEnginesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[engine.Engine]: + async def async_generator(): + async for page in self.pages: + for response in page.engines: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/transports/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/transports/__init__.py new file mode 100644 index 000000000000..4b34767ce1ca --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import EngineServiceTransport +from .grpc import EngineServiceGrpcTransport +from .grpc_asyncio import EngineServiceGrpcAsyncIOTransport +from .rest import EngineServiceRestInterceptor, EngineServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[EngineServiceTransport]] +_transport_registry["grpc"] = EngineServiceGrpcTransport +_transport_registry["grpc_asyncio"] = EngineServiceGrpcAsyncIOTransport +_transport_registry["rest"] = EngineServiceRestTransport + +__all__ = ( + "EngineServiceTransport", + "EngineServiceGrpcTransport", + "EngineServiceGrpcAsyncIOTransport", + "EngineServiceRestTransport", + "EngineServiceRestInterceptor", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/transports/base.py new file mode 100644 index 000000000000..051a160a815d --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/transports/base.py @@ -0,0 +1,248 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1beta import gapic_version as package_version +from google.cloud.discoveryengine_v1beta.types import engine +from google.cloud.discoveryengine_v1beta.types import engine as gcd_engine +from google.cloud.discoveryengine_v1beta.types import engine_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class EngineServiceTransport(abc.ABC): + """Abstract transport class for EngineService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "discoveryengine.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
+ if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_engine: gapic_v1.method.wrap_method( + self.create_engine, + default_timeout=None, + client_info=client_info, + ), + self.delete_engine: gapic_v1.method.wrap_method( + self.delete_engine, + default_timeout=None, + client_info=client_info, + ), + self.update_engine: gapic_v1.method.wrap_method( + self.update_engine, + default_timeout=None, + client_info=client_info, + ), + self.get_engine: gapic_v1.method.wrap_method( + self.get_engine, + default_timeout=None, + client_info=client_info, + ), + self.list_engines: gapic_v1.method.wrap_method( + self.list_engines, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def create_engine( + self, + ) -> Callable[ + [engine_service.CreateEngineRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_engine( + self, + ) -> Callable[ + [engine_service.DeleteEngineRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_engine( + self, + ) -> Callable[ + [engine_service.UpdateEngineRequest], + Union[gcd_engine.Engine, Awaitable[gcd_engine.Engine]], + ]: + raise NotImplementedError() + + @property + def get_engine( + self, + ) -> Callable[ + [engine_service.GetEngineRequest], + Union[engine.Engine, Awaitable[engine.Engine]], + ]: + raise NotImplementedError() + + @property + def list_engines( + self, + ) -> Callable[ + [engine_service.ListEnginesRequest], + Union[ + engine_service.ListEnginesResponse, + Awaitable[engine_service.ListEnginesResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("EngineServiceTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/transports/grpc.py 
# -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from typing import Callable, Dict, Optional, Sequence, Tuple, Union
import warnings

from google.api_core import gapic_v1, grpc_helpers, operations_v1
import google.auth  # type: ignore
from google.auth import credentials as ga_credentials  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore
from google.cloud.location import locations_pb2  # type: ignore
from google.longrunning import operations_pb2  # type: ignore
import grpc  # type: ignore

from google.cloud.discoveryengine_v1beta.types import engine
from google.cloud.discoveryengine_v1beta.types import engine as gcd_engine
from google.cloud.discoveryengine_v1beta.types import engine_service

from .base import DEFAULT_CLIENT_INFO, EngineServiceTransport


class EngineServiceGrpcTransport(EngineServiceTransport):
    """gRPC backend transport for EngineService.

    Service for managing
    [Engine][google.cloud.discoveryengine.v1beta.Engine] configuration.

    This class defines the same methods as the primary client, so the
    primary client can load the underlying transport implementation
    and call it.

    It sends protocol buffers over the wire using gRPC (which is built on
    top of HTTP/2); the ``grpcio`` package must be installed.
    """

    # Cache of RPC stubs, created lazily by the per-method properties below.
    _stubs: Dict[str, Callable]

    def __init__(
        self,
        *,
        host: str = "discoveryengine.googleapis.com",
        credentials: Optional[ga_credentials.Credentials] = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        channel: Optional[grpc.Channel] = None,
        api_mtls_endpoint: Optional[str] = None,
        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
        quota_project_id: Optional[str] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
        always_use_jwt_access: Optional[bool] = False,
        api_audience: Optional[str] = None,
    ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                 The hostname to connect to (default: 'discoveryengine.googleapis.com').
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
                This argument is ignored if ``channel`` is provided.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
                ignored if ``channel`` is provided.
            channel (Optional[grpc.Channel]): A ``Channel`` instance through
                which to make calls.
            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
                If provided, it overrides the ``host`` argument and tries to create
                a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
                Deprecated. A callback to provide client SSL certificate bytes and
                private key bytes, both in PEM format. It is ignored if
                ``api_mtls_endpoint`` is None.
            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
                for the grpc channel. It is ignored if ``channel`` is provided.
            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
                A callback to provide client certificate bytes and private key bytes,
                both in PEM format. It is used to configure a mutual TLS channel. It is
                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.

        Raises:
          google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
              creation failed for any reason.
          google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
              and ``credentials_file`` are passed.
        """
        self._grpc_channel = None
        self._ssl_channel_credentials = ssl_channel_credentials
        self._stubs: Dict[str, Callable] = {}
        self._operations_client: Optional[operations_v1.OperationsClient] = None

        if api_mtls_endpoint:
            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
        if client_cert_source:
            warnings.warn("client_cert_source is deprecated", DeprecationWarning)

        if channel:
            # Ignore credentials if a channel was passed.
            credentials = False
            # If a channel was explicitly provided, set it.
            self._grpc_channel = channel
            self._ssl_channel_credentials = None

        else:
            if api_mtls_endpoint:
                host = api_mtls_endpoint

                # Create SSL credentials with client_cert_source or application
                # default SSL credentials.
                if client_cert_source:
                    cert, key = client_cert_source()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )
                else:
                    self._ssl_channel_credentials = SslCredentials().ssl_credentials

            else:
                if client_cert_source_for_mtls and not ssl_channel_credentials:
                    cert, key = client_cert_source_for_mtls()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )

        # The base transport sets the host, credentials and scopes
        super().__init__(
            host=host,
            credentials=credentials,
            credentials_file=credentials_file,
            scopes=scopes,
            quota_project_id=quota_project_id,
            client_info=client_info,
            always_use_jwt_access=always_use_jwt_access,
            api_audience=api_audience,
        )

        if not self._grpc_channel:
            self._grpc_channel = type(self).create_channel(
                self._host,
                # use the credentials which are saved
                credentials=self._credentials,
                # Set ``credentials_file`` to ``None`` here as
                # the credentials that we saved earlier should be used.
                credentials_file=None,
                scopes=self._scopes,
                ssl_credentials=self._ssl_channel_credentials,
                quota_project_id=quota_project_id,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )

        # Wrap messages. This must be done after self._grpc_channel exists
        self._prep_wrapped_messages(client_info)

    @classmethod
    def create_channel(
        cls,
        host: str = "discoveryengine.googleapis.com",
        credentials: Optional[ga_credentials.Credentials] = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        quota_project_id: Optional[str] = None,
        **kwargs,
    ) -> grpc.Channel:
        """Create and return a gRPC channel object.
        Args:
            host (Optional[str]): The host for the channel to use.
            credentials (Optional[~.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If
                none are specified, the client will attempt to ascertain
                the credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            kwargs (Optional[dict]): Keyword arguments, which are passed to the
                channel creation.
        Returns:
            grpc.Channel: A gRPC channel object.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
              and ``credentials_file`` are passed.
        """

        return grpc_helpers.create_channel(
            host,
            credentials=credentials,
            credentials_file=credentials_file,
            quota_project_id=quota_project_id,
            default_scopes=cls.AUTH_SCOPES,
            scopes=scopes,
            default_host=cls.DEFAULT_HOST,
            **kwargs,
        )

    @property
    def grpc_channel(self) -> grpc.Channel:
        """Return the channel designed to connect to this service."""
        return self._grpc_channel

    @property
    def operations_client(self) -> operations_v1.OperationsClient:
        """Create the client designed to process long-running operations.

        This property caches on the instance; repeated calls return the same
        client.
        """
        # Quick check: Only create a new client if we do not already have one.
        if self._operations_client is None:
            self._operations_client = operations_v1.OperationsClient(self.grpc_channel)

        # Return the client from cache.
        return self._operations_client

    @property
    def create_engine(
        self,
    ) -> Callable[[engine_service.CreateEngineRequest], operations_pb2.Operation]:
        r"""Return a callable for the create engine method over gRPC.

        Creates an [Engine][google.cloud.discoveryengine.v1beta.Engine].

        Returns:
            Callable[[~.CreateEngineRequest],
                    ~.Operation]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "create_engine" not in self._stubs:
            self._stubs["create_engine"] = self.grpc_channel.unary_unary(
                "/google.cloud.discoveryengine.v1beta.EngineService/CreateEngine",
                request_serializer=engine_service.CreateEngineRequest.serialize,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs["create_engine"]

    @property
    def delete_engine(
        self,
    ) -> Callable[[engine_service.DeleteEngineRequest], operations_pb2.Operation]:
        r"""Return a callable for the delete engine method over gRPC.

        Deletes an [Engine][google.cloud.discoveryengine.v1beta.Engine].

        Returns:
            Callable[[~.DeleteEngineRequest],
                    ~.Operation]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "delete_engine" not in self._stubs:
            self._stubs["delete_engine"] = self.grpc_channel.unary_unary(
                "/google.cloud.discoveryengine.v1beta.EngineService/DeleteEngine",
                request_serializer=engine_service.DeleteEngineRequest.serialize,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs["delete_engine"]

    @property
    def update_engine(
        self,
    ) -> Callable[[engine_service.UpdateEngineRequest], gcd_engine.Engine]:
        r"""Return a callable for the update engine method over gRPC.

        Updates an [Engine][google.cloud.discoveryengine.v1beta.Engine]

        Returns:
            Callable[[~.UpdateEngineRequest],
                    ~.Engine]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "update_engine" not in self._stubs:
            self._stubs["update_engine"] = self.grpc_channel.unary_unary(
                "/google.cloud.discoveryengine.v1beta.EngineService/UpdateEngine",
                request_serializer=engine_service.UpdateEngineRequest.serialize,
                response_deserializer=gcd_engine.Engine.deserialize,
            )
        return self._stubs["update_engine"]

    @property
    def get_engine(self) -> Callable[[engine_service.GetEngineRequest], engine.Engine]:
        r"""Return a callable for the get engine method over gRPC.

        Gets an [Engine][google.cloud.discoveryengine.v1beta.Engine].

        Returns:
            Callable[[~.GetEngineRequest],
                    ~.Engine]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "get_engine" not in self._stubs:
            self._stubs["get_engine"] = self.grpc_channel.unary_unary(
                "/google.cloud.discoveryengine.v1beta.EngineService/GetEngine",
                request_serializer=engine_service.GetEngineRequest.serialize,
                response_deserializer=engine.Engine.deserialize,
            )
        return self._stubs["get_engine"]

    @property
    def list_engines(
        self,
    ) -> Callable[
        [engine_service.ListEnginesRequest], engine_service.ListEnginesResponse
    ]:
        r"""Return a callable for the list engines method over gRPC.

        Lists all the
        [Engine][google.cloud.discoveryengine.v1beta.Engine]s associated
        with the project.

        Returns:
            Callable[[~.ListEnginesRequest],
                    ~.ListEnginesResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "list_engines" not in self._stubs:
            self._stubs["list_engines"] = self.grpc_channel.unary_unary(
                "/google.cloud.discoveryengine.v1beta.EngineService/ListEngines",
                request_serializer=engine_service.ListEnginesRequest.serialize,
                response_deserializer=engine_service.ListEnginesResponse.deserialize,
            )
        return self._stubs["list_engines"]

    def close(self):
        """Close the underlying gRPC channel."""
        self.grpc_channel.close()

    @property
    def get_operation(
        self,
    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
        r"""Return a callable for the get_operation method over gRPC."""
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "get_operation" not in self._stubs:
            self._stubs["get_operation"] = self.grpc_channel.unary_unary(
                "/google.longrunning.Operations/GetOperation",
                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs["get_operation"]

    @property
    def list_operations(
        self,
    ) -> Callable[
        [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse
    ]:
        r"""Return a callable for the list_operations method over gRPC."""
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "list_operations" not in self._stubs:
            self._stubs["list_operations"] = self.grpc_channel.unary_unary(
                "/google.longrunning.Operations/ListOperations",
                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
            )
        return self._stubs["list_operations"]

    @property
    def kind(self) -> str:
        """Return the transport kind identifier (``"grpc"``)."""
        return "grpc"


__all__ = ("EngineServiceGrpcTransport",)
#
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
import warnings

from google.api_core import gapic_v1, grpc_helpers_async, operations_v1
from google.auth import credentials as ga_credentials  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore
from google.cloud.location import locations_pb2  # type: ignore
from google.longrunning import operations_pb2  # type: ignore
import grpc  # type: ignore
from grpc.experimental import aio  # type: ignore

from google.cloud.discoveryengine_v1beta.types import engine
from google.cloud.discoveryengine_v1beta.types import engine as gcd_engine
from google.cloud.discoveryengine_v1beta.types import engine_service

from .base import DEFAULT_CLIENT_INFO, EngineServiceTransport
from .grpc import EngineServiceGrpcTransport


class EngineServiceGrpcAsyncIOTransport(EngineServiceTransport):
    """gRPC AsyncIO backend transport for EngineService.

    Service for managing
    [Engine][google.cloud.discoveryengine.v1beta.Engine] configuration.

    This class defines the same methods as the primary client, so the
    primary client can load the underlying transport implementation
    and call it.

    It sends protocol buffers over the wire using gRPC (which is built on
    top of HTTP/2); the ``grpcio`` package must be installed.
    """

    # Async channel shared by all stubs; set in __init__.
    _grpc_channel: aio.Channel
    # Cache of RPC stubs, created lazily by the per-method properties below.
    _stubs: Dict[str, Callable] = {}

    @classmethod
    def create_channel(
        cls,
        host: str = "discoveryengine.googleapis.com",
        credentials: Optional[ga_credentials.Credentials] = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        quota_project_id: Optional[str] = None,
        **kwargs,
    ) -> aio.Channel:
        """Create and return a gRPC AsyncIO channel object.
        Args:
            host (Optional[str]): The host for the channel to use.
            credentials (Optional[~.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If
                none are specified, the client will attempt to ascertain
                the credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            kwargs (Optional[dict]): Keyword arguments, which are passed to the
                channel creation.
        Returns:
            aio.Channel: A gRPC AsyncIO channel object.
        """

        return grpc_helpers_async.create_channel(
            host,
            credentials=credentials,
            credentials_file=credentials_file,
            quota_project_id=quota_project_id,
            default_scopes=cls.AUTH_SCOPES,
            scopes=scopes,
            default_host=cls.DEFAULT_HOST,
            **kwargs,
        )

    def __init__(
        self,
        *,
        host: str = "discoveryengine.googleapis.com",
        credentials: Optional[ga_credentials.Credentials] = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        channel: Optional[aio.Channel] = None,
        api_mtls_endpoint: Optional[str] = None,
        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
        quota_project_id: Optional[str] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
        always_use_jwt_access: Optional[bool] = False,
        api_audience: Optional[str] = None,
    ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                 The hostname to connect to (default: 'discoveryengine.googleapis.com').
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
                This argument is ignored if ``channel`` is provided.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            channel (Optional[aio.Channel]): A ``Channel`` instance through
                which to make calls.
            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
                If provided, it overrides the ``host`` argument and tries to create
                a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
                Deprecated. A callback to provide client SSL certificate bytes and
                private key bytes, both in PEM format. It is ignored if
                ``api_mtls_endpoint`` is None.
            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
                for the grpc channel. It is ignored if ``channel`` is provided.
            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
                A callback to provide client certificate bytes and private key bytes,
                both in PEM format. It is used to configure a mutual TLS channel. It is
                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
              creation failed for any reason.
            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
              and ``credentials_file`` are passed.
        """
        self._grpc_channel = None
        self._ssl_channel_credentials = ssl_channel_credentials
        self._stubs: Dict[str, Callable] = {}
        self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None

        if api_mtls_endpoint:
            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
        if client_cert_source:
            warnings.warn("client_cert_source is deprecated", DeprecationWarning)

        if channel:
            # Ignore credentials if a channel was passed.
            credentials = False
            # If a channel was explicitly provided, set it.
            self._grpc_channel = channel
            self._ssl_channel_credentials = None
        else:
            if api_mtls_endpoint:
                host = api_mtls_endpoint

                # Create SSL credentials with client_cert_source or application
                # default SSL credentials.
                if client_cert_source:
                    cert, key = client_cert_source()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )
                else:
                    self._ssl_channel_credentials = SslCredentials().ssl_credentials

            else:
                if client_cert_source_for_mtls and not ssl_channel_credentials:
                    cert, key = client_cert_source_for_mtls()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )

        # The base transport sets the host, credentials and scopes
        super().__init__(
            host=host,
            credentials=credentials,
            credentials_file=credentials_file,
            scopes=scopes,
            quota_project_id=quota_project_id,
            client_info=client_info,
            always_use_jwt_access=always_use_jwt_access,
            api_audience=api_audience,
        )

        if not self._grpc_channel:
            self._grpc_channel = type(self).create_channel(
                self._host,
                # use the credentials which are saved
                credentials=self._credentials,
                # Set ``credentials_file`` to ``None`` here as
                # the credentials that we saved earlier should be used.
                credentials_file=None,
                scopes=self._scopes,
                ssl_credentials=self._ssl_channel_credentials,
                quota_project_id=quota_project_id,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )

        # Wrap messages. This must be done after self._grpc_channel exists
        self._prep_wrapped_messages(client_info)

    @property
    def grpc_channel(self) -> aio.Channel:
        """Create the channel designed to connect to this service.

        This property caches on the instance; repeated calls return
        the same channel.
        """
        # Return the channel from cache.
        return self._grpc_channel

    @property
    def operations_client(self) -> operations_v1.OperationsAsyncClient:
        """Create the client designed to process long-running operations.

        This property caches on the instance; repeated calls return the same
        client.
        """
        # Quick check: Only create a new client if we do not already have one.
        if self._operations_client is None:
            self._operations_client = operations_v1.OperationsAsyncClient(
                self.grpc_channel
            )

        # Return the client from cache.
        return self._operations_client

    @property
    def create_engine(
        self,
    ) -> Callable[
        [engine_service.CreateEngineRequest], Awaitable[operations_pb2.Operation]
    ]:
        r"""Return a callable for the create engine method over gRPC.

        Creates an [Engine][google.cloud.discoveryengine.v1beta.Engine].

        Returns:
            Callable[[~.CreateEngineRequest],
                    Awaitable[~.Operation]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "create_engine" not in self._stubs:
            self._stubs["create_engine"] = self.grpc_channel.unary_unary(
                "/google.cloud.discoveryengine.v1beta.EngineService/CreateEngine",
                request_serializer=engine_service.CreateEngineRequest.serialize,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs["create_engine"]

    @property
    def delete_engine(
        self,
    ) -> Callable[
        [engine_service.DeleteEngineRequest], Awaitable[operations_pb2.Operation]
    ]:
        r"""Return a callable for the delete engine method over gRPC.

        Deletes an [Engine][google.cloud.discoveryengine.v1beta.Engine].

        Returns:
            Callable[[~.DeleteEngineRequest],
                    Awaitable[~.Operation]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "delete_engine" not in self._stubs:
            self._stubs["delete_engine"] = self.grpc_channel.unary_unary(
                "/google.cloud.discoveryengine.v1beta.EngineService/DeleteEngine",
                request_serializer=engine_service.DeleteEngineRequest.serialize,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs["delete_engine"]

    @property
    def update_engine(
        self,
    ) -> Callable[[engine_service.UpdateEngineRequest], Awaitable[gcd_engine.Engine]]:
        r"""Return a callable for the update engine method over gRPC.

        Updates an [Engine][google.cloud.discoveryengine.v1beta.Engine]

        Returns:
            Callable[[~.UpdateEngineRequest],
                    Awaitable[~.Engine]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "update_engine" not in self._stubs:
            self._stubs["update_engine"] = self.grpc_channel.unary_unary(
                "/google.cloud.discoveryengine.v1beta.EngineService/UpdateEngine",
                request_serializer=engine_service.UpdateEngineRequest.serialize,
                response_deserializer=gcd_engine.Engine.deserialize,
            )
        return self._stubs["update_engine"]

    @property
    def get_engine(
        self,
    ) -> Callable[[engine_service.GetEngineRequest], Awaitable[engine.Engine]]:
        r"""Return a callable for the get engine method over gRPC.

        Gets an [Engine][google.cloud.discoveryengine.v1beta.Engine].

        Returns:
            Callable[[~.GetEngineRequest],
                    Awaitable[~.Engine]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "get_engine" not in self._stubs:
            self._stubs["get_engine"] = self.grpc_channel.unary_unary(
                "/google.cloud.discoveryengine.v1beta.EngineService/GetEngine",
                request_serializer=engine_service.GetEngineRequest.serialize,
                response_deserializer=engine.Engine.deserialize,
            )
        return self._stubs["get_engine"]

    @property
    def list_engines(
        self,
    ) -> Callable[
        [engine_service.ListEnginesRequest],
        Awaitable[engine_service.ListEnginesResponse],
    ]:
        r"""Return a callable for the list engines method over gRPC.

        Lists all the
        [Engine][google.cloud.discoveryengine.v1beta.Engine]s associated
        with the project.

        Returns:
            Callable[[~.ListEnginesRequest],
                    Awaitable[~.ListEnginesResponse]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "list_engines" not in self._stubs:
            self._stubs["list_engines"] = self.grpc_channel.unary_unary(
                "/google.cloud.discoveryengine.v1beta.EngineService/ListEngines",
                request_serializer=engine_service.ListEnginesRequest.serialize,
                response_deserializer=engine_service.ListEnginesResponse.deserialize,
            )
        return self._stubs["list_engines"]

    def close(self):
        """Close the underlying channel; returns the value of ``aio.Channel.close``."""
        return self.grpc_channel.close()

    @property
    def get_operation(
        self,
    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
        r"""Return a callable for the get_operation method over gRPC."""
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "get_operation" not in self._stubs:
            self._stubs["get_operation"] = self.grpc_channel.unary_unary(
                "/google.longrunning.Operations/GetOperation",
                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs["get_operation"]

    @property
    def list_operations(
        self,
    ) -> Callable[
        [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse
    ]:
        r"""Return a callable for the list_operations method over gRPC."""
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "list_operations" not in self._stubs:
            self._stubs["list_operations"] = self.grpc_channel.unary_unary(
                "/google.longrunning.Operations/ListOperations",
                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
            )
        return self._stubs["list_operations"]


__all__ = ("EngineServiceGrpcAsyncIOTransport",)
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.discoveryengine_v1beta.types import engine +from google.cloud.discoveryengine_v1beta.types import engine as gcd_engine +from google.cloud.discoveryengine_v1beta.types import engine_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import EngineServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class EngineServiceRestInterceptor: + 
"""Interceptor for EngineService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the EngineServiceRestTransport. + + .. code-block:: python + class MyCustomEngineServiceInterceptor(EngineServiceRestInterceptor): + def pre_create_engine(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_engine(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_engine(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_engine(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_engine(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_engine(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_engines(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_engines(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_engine(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_engine(self, response): + logging.log(f"Received response: {response}") + return response + + transport = EngineServiceRestTransport(interceptor=MyCustomEngineServiceInterceptor()) + client = EngineServiceClient(transport=transport) + + + """ + + def pre_create_engine( + self, + request: engine_service.CreateEngineRequest, + metadata: Sequence[Tuple[str, str]], + ) -> 
Tuple[engine_service.CreateEngineRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_engine + + Override in a subclass to manipulate the request or metadata + before they are sent to the EngineService server. + """ + return request, metadata + + def post_create_engine( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_engine + + Override in a subclass to manipulate the response + after it is returned by the EngineService server but before + it is returned to user code. + """ + return response + + def pre_delete_engine( + self, + request: engine_service.DeleteEngineRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[engine_service.DeleteEngineRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_engine + + Override in a subclass to manipulate the request or metadata + before they are sent to the EngineService server. + """ + return request, metadata + + def post_delete_engine( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_engine + + Override in a subclass to manipulate the response + after it is returned by the EngineService server but before + it is returned to user code. + """ + return response + + def pre_get_engine( + self, + request: engine_service.GetEngineRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[engine_service.GetEngineRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_engine + + Override in a subclass to manipulate the request or metadata + before they are sent to the EngineService server. + """ + return request, metadata + + def post_get_engine(self, response: engine.Engine) -> engine.Engine: + """Post-rpc interceptor for get_engine + + Override in a subclass to manipulate the response + after it is returned by the EngineService server but before + it is returned to user code. 
+ """ + return response + + def pre_list_engines( + self, + request: engine_service.ListEnginesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[engine_service.ListEnginesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_engines + + Override in a subclass to manipulate the request or metadata + before they are sent to the EngineService server. + """ + return request, metadata + + def post_list_engines( + self, response: engine_service.ListEnginesResponse + ) -> engine_service.ListEnginesResponse: + """Post-rpc interceptor for list_engines + + Override in a subclass to manipulate the response + after it is returned by the EngineService server but before + it is returned to user code. + """ + return response + + def pre_update_engine( + self, + request: engine_service.UpdateEngineRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[engine_service.UpdateEngineRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_engine + + Override in a subclass to manipulate the request or metadata + before they are sent to the EngineService server. + """ + return request, metadata + + def post_update_engine(self, response: gcd_engine.Engine) -> gcd_engine.Engine: + """Post-rpc interceptor for update_engine + + Override in a subclass to manipulate the response + after it is returned by the EngineService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the EngineService server. 
+ """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the EngineService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the EngineService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the EngineService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class EngineServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: EngineServiceRestInterceptor + + +class EngineServiceRestTransport(EngineServiceTransport): + """REST backend transport for EngineService. + + Service for managing + [Engine][google.cloud.discoveryengine.v1beta.Engine] configuration. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[EngineServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or EngineServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Only create a new client if we do not already have one.
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/engines/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, + { + "method": "get", + "uri": 
"/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/engines/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1beta", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. 
+ return self._operations_client + + class _CreateEngine(EngineServiceRestStub): + def __hash__(self): + return hash("CreateEngine") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "engineId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: engine_service.CreateEngineRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create engine method over HTTP. + + Args: + request (~.engine_service.CreateEngineRequest): + The request object. Request for + [EngineService.CreateEngine][google.cloud.discoveryengine.v1beta.EngineService.CreateEngine] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*/collections/*}/engines", + "body": "engine", + }, + ] + request, metadata = self._interceptor.pre_create_engine(request, metadata) + pb_request = engine_service.CreateEngineRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_engine(resp) + return resp + + class _DeleteEngine(EngineServiceRestStub): + def __hash__(self): + return hash("DeleteEngine") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: engine_service.DeleteEngineRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete engine method over HTTP. + + Args: + request (~.engine_service.DeleteEngineRequest): + The request object. Request message for + [EngineService.DeleteEngine][google.cloud.discoveryengine.v1beta.EngineService.DeleteEngine] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/engines/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_engine(request, metadata) + pb_request = engine_service.DeleteEngineRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_engine(resp) + return resp + + class _GetEngine(EngineServiceRestStub): + def __hash__(self): + return hash("GetEngine") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: engine_service.GetEngineRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> engine.Engine: + r"""Call the get engine method over HTTP. + + Args: + request (~.engine_service.GetEngineRequest): + The request object. Request message for + [EngineService.GetEngine][google.cloud.discoveryengine.v1beta.EngineService.GetEngine] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.engine.Engine: + Metadata that describes the training and serving + parameters of an + [Engine][google.cloud.discoveryengine.v1beta.Engine]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/engines/*}", + }, + ] + request, metadata = self._interceptor.pre_get_engine(request, metadata) + pb_request = engine_service.GetEngineRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = engine.Engine() + pb_resp = engine.Engine.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_engine(resp) + return resp + + class _ListEngines(EngineServiceRestStub): + def __hash__(self): + return hash("ListEngines") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: engine_service.ListEnginesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> engine_service.ListEnginesResponse: + r"""Call the list engines method over HTTP. + + Args: + request (~.engine_service.ListEnginesRequest): + The request object. Request message for + [EngineService.ListEngines][google.cloud.discoveryengine.v1beta.EngineService.ListEngines] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.engine_service.ListEnginesResponse: + Response message for + [EngineService.ListEngines][google.cloud.discoveryengine.v1beta.EngineService.ListEngines] + method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{parent=projects/*/locations/*/collections/*}/engines", + }, + ] + request, metadata = self._interceptor.pre_list_engines(request, metadata) + pb_request = engine_service.ListEnginesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = engine_service.ListEnginesResponse() + pb_resp = engine_service.ListEnginesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_engines(resp) + return resp + + class _UpdateEngine(EngineServiceRestStub): + def __hash__(self): + return hash("UpdateEngine") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: engine_service.UpdateEngineRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_engine.Engine: + r"""Call the update engine method over HTTP. + + Args: + request (~.engine_service.UpdateEngineRequest): + The request object. Request message for + [EngineService.UpdateEngine][google.cloud.discoveryengine.v1beta.EngineService.UpdateEngine] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcd_engine.Engine: + Metadata that describes the training and serving + parameters of an + [Engine][google.cloud.discoveryengine.v1beta.Engine]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1beta/{engine.name=projects/*/locations/*/collections/*/engines/*}", + "body": "engine", + }, + ] + request, metadata = self._interceptor.pre_update_engine(request, metadata) + pb_request = engine_service.UpdateEngineRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcd_engine.Engine() + pb_resp = gcd_engine.Engine.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_engine(resp) + return resp + + @property + def create_engine( + self, + ) -> Callable[[engine_service.CreateEngineRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateEngine(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_engine( + self, + ) -> Callable[[engine_service.DeleteEngineRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteEngine(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_engine(self) -> Callable[[engine_service.GetEngineRequest], engine.Engine]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetEngine(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_engines( + self, + ) -> Callable[ + [engine_service.ListEnginesRequest], engine_service.ListEnginesResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListEngines(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_engine( + self, + ) -> Callable[[engine_service.UpdateEngineRequest], gcd_engine.Engine]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateEngine(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(EngineServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/engines/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + 
query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(EngineServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/engines/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = 
json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("EngineServiceRestTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/transports/rest.py index 179f26290792..5ffe31b63769 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/transports/rest.py @@ -297,6 +297,11 @@ def __call__( "uri": "/v1beta/{serving_config=projects/*/locations/*/collections/*/dataStores/*/servingConfigs/*}:recommend", "body": "*", }, + { + "method": "post", + "uri": "/v1beta/{serving_config=projects/*/locations/*/collections/*/engines/*/servingConfigs/*}:recommend", + "body": "*", + }, ] request, metadata = self._interceptor.pre_recommend(request, metadata) pb_request = recommendation_service.RecommendRequest.pb(request) @@ -388,6 +393,10 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": 
"get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, { "method": "get", "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", @@ -502,6 +511,10 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, { "method": "get", "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/rest.py index bf8371b16ff1..b0b48eeccb70 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/rest.py @@ -387,6 +387,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, { "method": "get", "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", @@ -441,6 +445,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: }, ], "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, { "method": "get", "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", @@ -1071,6 +1079,10 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + 
"method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, { "method": "get", "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", @@ -1185,6 +1197,10 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, { "method": "get", "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/rest.py index 952d74176406..d57a6e96d341 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/rest.py @@ -298,6 +298,11 @@ def __call__( "uri": "/v1beta/{serving_config=projects/*/locations/*/collections/*/dataStores/*/servingConfigs/*}:search", "body": "*", }, + { + "method": "post", + "uri": "/v1beta/{serving_config=projects/*/locations/*/collections/*/engines/*/servingConfigs/*}:search", + "body": "*", + }, ] request, metadata = self._interceptor.pre_search(request, metadata) pb_request = search_service.SearchRequest.pb(request) @@ -386,6 +391,10 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, { "method": "get", "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", @@ -500,6 +509,10 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": 
"/v1beta/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, { "method": "get", "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/__init__.py new file mode 100644 index 000000000000..2a6b766de2a6 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import ServingConfigServiceAsyncClient +from .client import ServingConfigServiceClient + +__all__ = ( + "ServingConfigServiceClient", + "ServingConfigServiceAsyncClient", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/async_client.py new file mode 100644 index 000000000000..fae5270a78c9 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/async_client.py @@ -0,0 +1,767 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.discoveryengine_v1beta.services.serving_config_service import pagers +from google.cloud.discoveryengine_v1beta.types import ( + serving_config as gcd_serving_config, +) +from google.cloud.discoveryengine_v1beta.types import common +from google.cloud.discoveryengine_v1beta.types import serving_config +from google.cloud.discoveryengine_v1beta.types import serving_config_service + +from .client import ServingConfigServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, ServingConfigServiceTransport +from .transports.grpc_asyncio import ServingConfigServiceGrpcAsyncIOTransport + + +class ServingConfigServiceAsyncClient: + """Service for modifying ServingConfig.""" + + _client: ServingConfigServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = ServingConfigServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ServingConfigServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ServingConfigServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = ServingConfigServiceClient._DEFAULT_UNIVERSE + + data_store_path = staticmethod(ServingConfigServiceClient.data_store_path) + parse_data_store_path = staticmethod( + ServingConfigServiceClient.parse_data_store_path + ) + serving_config_path = staticmethod(ServingConfigServiceClient.serving_config_path) + parse_serving_config_path = staticmethod( + ServingConfigServiceClient.parse_serving_config_path + ) + common_billing_account_path = staticmethod( + ServingConfigServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ServingConfigServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(ServingConfigServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + ServingConfigServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + ServingConfigServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + ServingConfigServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(ServingConfigServiceClient.common_project_path) + parse_common_project_path = staticmethod( + ServingConfigServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(ServingConfigServiceClient.common_location_path) + parse_common_location_path = staticmethod( + ServingConfigServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. 
+ + Returns: + ServingConfigServiceAsyncClient: The constructed client. + """ + return ServingConfigServiceClient.from_service_account_info.__func__(ServingConfigServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ServingConfigServiceAsyncClient: The constructed client. + """ + return ServingConfigServiceClient.from_service_account_file.__func__(ServingConfigServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. 
+ + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return ServingConfigServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> ServingConfigServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ServingConfigServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = functools.partial( + type(ServingConfigServiceClient).get_transport_class, + type(ServingConfigServiceClient), + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, ServingConfigServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the serving config service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Union[str, ~.ServingConfigServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client = ServingConfigServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def update_serving_config( + self, + request: Optional[ + Union[serving_config_service.UpdateServingConfigRequest, dict] + ] = None, + *, + serving_config: Optional[gcd_serving_config.ServingConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_serving_config.ServingConfig: + r"""Updates a ServingConfig. + + Returns a NOT_FOUND error if the ServingConfig does not exist. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_update_serving_config(): + # Create a client + client = discoveryengine_v1beta.ServingConfigServiceAsyncClient() + + # Initialize request argument(s) + serving_config = discoveryengine_v1beta.ServingConfig() + serving_config.media_config.content_watched_percentage_threshold = 0.3811 + serving_config.display_name = "display_name_value" + serving_config.solution_type = "SOLUTION_TYPE_CHAT" + + request = discoveryengine_v1beta.UpdateServingConfigRequest( + serving_config=serving_config, + ) + + # Make the request + response = await client.update_serving_config(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.UpdateServingConfigRequest, dict]]): + The request object. 
Request for UpdateServingConfig + method. + serving_config (:class:`google.cloud.discoveryengine_v1beta.types.ServingConfig`): + Required. The ServingConfig to + update. + + This corresponds to the ``serving_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Indicates which fields in the provided + [ServingConfig][google.cloud.discoveryengine.v1beta.ServingConfig] + to update. The following are NOT supported: + + - [ServingConfig.name][google.cloud.discoveryengine.v1beta.ServingConfig.name] + + If not set, all supported fields are updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.types.ServingConfig: + Configures metadata that is used to + generate serving time results (e.g. + search results or recommendation + predictions). The ServingConfig is + passed in the search and predict request + and generates results. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([serving_config, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = serving_config_service.UpdateServingConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if serving_config is not None: + request.serving_config = serving_config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_serving_config, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("serving_config.name", request.serving_config.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_serving_config( + self, + request: Optional[ + Union[serving_config_service.GetServingConfigRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> serving_config.ServingConfig: + r"""Gets a ServingConfig. + + Returns a NotFound error if the ServingConfig does not + exist. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_get_serving_config(): + # Create a client + client = discoveryengine_v1beta.ServingConfigServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.GetServingConfigRequest( + name="name_value", + ) + + # Make the request + response = await client.get_serving_config(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.GetServingConfigRequest, dict]]): + The request object. Request for GetServingConfig method. + name (:class:`str`): + Required. The resource name of the ServingConfig to get. + Format: + ``projects/{project_number}/locations/{location}/collections/{collection}/dataStores/{data_store}/servingConfigs/{serving_config_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.types.ServingConfig: + Configures metadata that is used to + generate serving time results (e.g. + search results or recommendation + predictions). The ServingConfig is + passed in the search and predict request + and generates results. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = serving_config_service.GetServingConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_serving_config, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_serving_configs( + self, + request: Optional[ + Union[serving_config_service.ListServingConfigsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListServingConfigsAsyncPager: + r"""Lists all ServingConfigs linked to this dataStore. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_list_serving_configs(): + # Create a client + client = discoveryengine_v1beta.ServingConfigServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.ListServingConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_serving_configs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.ListServingConfigsRequest, dict]]): + The request object. Request for ListServingConfigs + method. + parent (:class:`str`): + Required. The dataStore resource name. Format: + ``projects/{project_number}/locations/{location}/collections/{collection}/dataStores/{data_store}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.services.serving_config_service.pagers.ListServingConfigsAsyncPager: + Response for ListServingConfigs + method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = serving_config_service.ListServingConfigsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_serving_configs, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListServingConfigsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "ServingConfigServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ServingConfigServiceAsyncClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/client.py new file mode 100644 index 000000000000..0588e31ad708 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/client.py @@ -0,0 +1,1204 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.discoveryengine_v1beta.services.serving_config_service import pagers +from google.cloud.discoveryengine_v1beta.types import ( + serving_config as gcd_serving_config, +) +from google.cloud.discoveryengine_v1beta.types import common +from google.cloud.discoveryengine_v1beta.types import serving_config +from google.cloud.discoveryengine_v1beta.types import serving_config_service + +from .transports.base import DEFAULT_CLIENT_INFO, ServingConfigServiceTransport +from .transports.grpc import ServingConfigServiceGrpcTransport +from .transports.grpc_asyncio import ServingConfigServiceGrpcAsyncIOTransport +from .transports.rest import ServingConfigServiceRestTransport + + +class 
ServingConfigServiceClientMeta(type):
+    """Metaclass for the ServingConfigService client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+
+    _transport_registry = (
+        OrderedDict()
+    )  # type: Dict[str, Type[ServingConfigServiceTransport]]
+    _transport_registry["grpc"] = ServingConfigServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = ServingConfigServiceGrpcAsyncIOTransport
+    _transport_registry["rest"] = ServingConfigServiceRestTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[ServingConfigServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class ServingConfigServiceClient(metaclass=ServingConfigServiceClientMeta):
+    """Service for modifying ServingConfig."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "discoveryengine.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "discoveryengine.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ServingConfigServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ServingConfigServiceClient: The constructed client. 
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> ServingConfigServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            ServingConfigServiceTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def data_store_path(
+        project: str,
+        location: str,
+        data_store: str,
+    ) -> str:
+        """Returns a fully-qualified data_store string."""
+        return "projects/{project}/locations/{location}/dataStores/{data_store}".format(
+            project=project,
+            location=location,
+            data_store=data_store,
+        )
+
+    @staticmethod
+    def parse_data_store_path(path: str) -> Dict[str, str]:
+        """Parses a data_store path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/dataStores/(?P<data_store>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def serving_config_path(
+        project: str,
+        location: str,
+        data_store: str,
+        serving_config: str,
+    ) -> str:
+        """Returns a fully-qualified serving_config string."""
+        return "projects/{project}/locations/{location}/dataStores/{data_store}/servingConfigs/{serving_config}".format(
+            project=project,
+            location=location,
+            data_store=data_store,
+            serving_config=serving_config,
+        )
+
+    @staticmethod
+    def parse_serving_config_path(path: str) -> Dict[str, str]:
+        """Parses a serving_config path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/dataStores/(?P<data_store>.+?)/servingConfigs/(?P<serving_config>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def
parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse a organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def
get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". 
+ + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = ServingConfigServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = ServingConfigServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ServingConfigServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = ServingConfigServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. 
+ + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = ServingConfigServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or ServingConfigServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ServingConfigServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the serving config service client. 
+ + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ServingConfigServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = ServingConfigServiceClient._read_environment_variables() + self._client_cert_source = ServingConfigServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = ServingConfigServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, ServingConfigServiceTransport) + if transport_provided: + # transport is a ServingConfigServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." 
+ ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(ServingConfigServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or ServingConfigServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(cast(str, transport)) + self._transport = Transport( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def update_serving_config( + self, + request: Optional[ + Union[serving_config_service.UpdateServingConfigRequest, dict] + ] = None, + *, + serving_config: Optional[gcd_serving_config.ServingConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_serving_config.ServingConfig: + r"""Updates a ServingConfig. + + Returns a NOT_FOUND error if the ServingConfig does not exist. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_update_serving_config(): + # Create a client + client = discoveryengine_v1beta.ServingConfigServiceClient() + + # Initialize request argument(s) + serving_config = discoveryengine_v1beta.ServingConfig() + serving_config.media_config.content_watched_percentage_threshold = 0.3811 + serving_config.display_name = "display_name_value" + serving_config.solution_type = "SOLUTION_TYPE_CHAT" + + request = discoveryengine_v1beta.UpdateServingConfigRequest( + serving_config=serving_config, + ) + + # Make the request + response = client.update_serving_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.UpdateServingConfigRequest, dict]): + The request object. Request for UpdateServingConfig + method. + serving_config (google.cloud.discoveryengine_v1beta.types.ServingConfig): + Required. The ServingConfig to + update. + + This corresponds to the ``serving_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Indicates which fields in the provided + [ServingConfig][google.cloud.discoveryengine.v1beta.ServingConfig] + to update. The following are NOT supported: + + - [ServingConfig.name][google.cloud.discoveryengine.v1beta.ServingConfig.name] + + If not set, all supported fields are updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.types.ServingConfig: + Configures metadata that is used to + generate serving time results (e.g. + search results or recommendation + predictions). The ServingConfig is + passed in the search and predict request + and generates results. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([serving_config, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a serving_config_service.UpdateServingConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, serving_config_service.UpdateServingConfigRequest): + request = serving_config_service.UpdateServingConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if serving_config is not None: + request.serving_config = serving_config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_serving_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("serving_config.name", request.serving_config.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_serving_config( + self, + request: Optional[ + Union[serving_config_service.GetServingConfigRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> serving_config.ServingConfig: + r"""Gets a ServingConfig. + + Returns a NotFound error if the ServingConfig does not + exist. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_get_serving_config(): + # Create a client + client = discoveryengine_v1beta.ServingConfigServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.GetServingConfigRequest( + name="name_value", + ) + + # Make the request + response = client.get_serving_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.GetServingConfigRequest, dict]): + The request object. Request for GetServingConfig method. + name (str): + Required. The resource name of the ServingConfig to get. + Format: + ``projects/{project_number}/locations/{location}/collections/{collection}/dataStores/{data_store}/servingConfigs/{serving_config_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.types.ServingConfig: + Configures metadata that is used to + generate serving time results (e.g. + search results or recommendation + predictions). The ServingConfig is + passed in the search and predict request + and generates results. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a serving_config_service.GetServingConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, serving_config_service.GetServingConfigRequest): + request = serving_config_service.GetServingConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_serving_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_serving_configs( + self, + request: Optional[ + Union[serving_config_service.ListServingConfigsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListServingConfigsPager: + r"""Lists all ServingConfigs linked to this dataStore. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_list_serving_configs(): + # Create a client + client = discoveryengine_v1beta.ServingConfigServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.ListServingConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_serving_configs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.ListServingConfigsRequest, dict]): + The request object. Request for ListServingConfigs + method. + parent (str): + Required. The dataStore resource name. Format: + ``projects/{project_number}/locations/{location}/collections/{collection}/dataStores/{data_store}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.services.serving_config_service.pagers.ListServingConfigsPager: + Response for ListServingConfigs + method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a serving_config_service.ListServingConfigsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, serving_config_service.ListServingConfigsRequest): + request = serving_config_service.ListServingConfigsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_serving_configs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListServingConfigsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ServingConfigServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ServingConfigServiceClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/pagers.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/pagers.py new file mode 100644 index 000000000000..01dd5522c890 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/pagers.py @@ -0,0 +1,162 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.discoveryengine_v1beta.types import ( + serving_config, + serving_config_service, +) + + +class ListServingConfigsPager: + """A pager for iterating through ``list_serving_configs`` requests. 
class ListServingConfigsPager:
    """A pager for iterating through ``list_serving_configs`` requests.

    Thinly wraps an initial
    :class:`google.cloud.discoveryengine_v1beta.types.ListServingConfigsResponse`
    and fetches follow-up pages on demand. Iterating the pager yields the
    individual ``serving_configs`` items across all pages; attribute access
    for anything else is delegated to the most recent response.
    """

    def __init__(
        self,
        method: Callable[..., serving_config_service.ListServingConfigsResponse],
        request: serving_config_service.ListServingConfigsRequest,
        response: serving_config_service.ListServingConfigsResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The API call that produced ``response``;
                reused to fetch further pages.
            request (google.cloud.discoveryengine_v1beta.types.ListServingConfigsRequest):
                The initial request object.
            response (google.cloud.discoveryengine_v1beta.types.ListServingConfigsResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        # Copy defensively: page_token mutation must not touch the
        # caller's request object.
        self._request = serving_config_service.ListServingConfigsRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Unknown attributes fall through to the latest response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[serving_config_service.ListServingConfigsResponse]:
        """Yield each response page, fetching the next one lazily."""
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = self._method(self._request, metadata=self._metadata)
            self._response = page
            yield page

    def __iter__(self) -> Iterator[serving_config.ServingConfig]:
        # Flatten every page into a single stream of ServingConfigs.
        return (item for page in self.pages for item in page.serving_configs)

    def __repr__(self) -> str:
        return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
+ """ + self._method = method + self._request = serving_config_service.ListServingConfigsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[serving_config_service.ListServingConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[serving_config.ServingConfig]: + for page in self.pages: + yield from page.serving_configs + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListServingConfigsAsyncPager: + """A pager for iterating through ``list_serving_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.discoveryengine_v1beta.types.ListServingConfigsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``serving_configs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListServingConfigs`` requests and continue to iterate + through the ``serving_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.discoveryengine_v1beta.types.ListServingConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[serving_config_service.ListServingConfigsResponse] + ], + request: serving_config_service.ListServingConfigsRequest, + response: serving_config_service.ListServingConfigsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.discoveryengine_v1beta.types.ListServingConfigsRequest): + The initial request object. + response (google.cloud.discoveryengine_v1beta.types.ListServingConfigsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = serving_config_service.ListServingConfigsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[serving_config_service.ListServingConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[serving_config.ServingConfig]: + async def async_generator(): + async for page in self.pages: + for response in page.serving_configs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/transports/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/transports/__init__.py new file mode 100644 index 000000000000..36d949ceee22 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except 
from collections import OrderedDict
from typing import Dict, Type

from .base import ServingConfigServiceTransport
from .grpc import ServingConfigServiceGrpcTransport
from .grpc_asyncio import ServingConfigServiceGrpcAsyncIOTransport
from .rest import ServingConfigServiceRestInterceptor, ServingConfigServiceRestTransport

# Registry of available transports, keyed by their short name. Insertion
# order is preserved deliberately: "grpc" comes first as the default.
_transport_registry = OrderedDict(
    [
        ("grpc", ServingConfigServiceGrpcTransport),
        ("grpc_asyncio", ServingConfigServiceGrpcAsyncIOTransport),
        ("rest", ServingConfigServiceRestTransport),
    ]
)  # type: Dict[str, Type[ServingConfigServiceTransport]]

__all__ = (
    "ServingConfigServiceTransport",
    "ServingConfigServiceGrpcTransport",
    "ServingConfigServiceGrpcAsyncIOTransport",
    "ServingConfigServiceRestTransport",
    "ServingConfigServiceRestInterceptor",
)
class ServingConfigServiceTransport(abc.ABC):
    """Abstract transport class for ServingConfigService.

    Resolves credentials and normalizes the target host; concrete
    subclasses (gRPC, gRPC-asyncio, REST) implement the RPC properties
    declared below.
    """

    AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)

    DEFAULT_HOST: str = "discoveryengine.googleapis.com"

    def __init__(
        self,
        *,
        host: str = DEFAULT_HOST,
        credentials: Optional[ga_credentials.Credentials] = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        quota_project_id: Optional[str] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
        always_use_jwt_access: Optional[bool] = False,
        api_audience: Optional[str] = None,
        **kwargs,
    ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]): Hostname to connect to (default:
                'discoveryengine.googleapis.com'); ":443" is appended when
                no port is given.
            credentials (Optional[google.auth.credentials.Credentials]):
                Credentials identifying the application to the service; if
                none are specified, ascertained from the environment.
                Mutually exclusive with ``credentials_file``.
            credentials_file (Optional[str]): A file loadable with
                :func:`google.auth.load_credentials_from_file`.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use
                for billing and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                Client info sent as part of the user-agent string.
            always_use_jwt_access (Optional[bool]): Whether self-signed JWT
                should be used for service account credentials.
            api_audience (Optional[str]): Audience applied to credentials
                that support ``with_gdch_audience``.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both
                ``credentials`` and ``credentials_file`` are provided.
        """
        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}

        # Remember the scopes exactly as requested by the caller.
        self._scopes = scopes

        # Explicit credentials and a credentials file cannot both be given.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs(
                "'credentials_file' and 'credentials' are mutually exclusive"
            )

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
            )
        elif credentials is None:
            credentials, _ = google.auth.default(
                **scopes_kwargs, quota_project_id=quota_project_id
            )
            # Don't apply audience if the credentials file passed from user.
            if hasattr(credentials, "with_gdch_audience"):
                credentials = credentials.with_gdch_audience(
                    api_audience if api_audience else host
                )

        # Service-account credentials prefer self-signed JWTs when the
        # caller asked for them and the auth library supports it.
        if (
            always_use_jwt_access
            and isinstance(credentials, service_account.Credentials)
            and hasattr(service_account.Credentials, "with_always_use_jwt_access")
        ):
            credentials = credentials.with_always_use_jwt_access(True)

        # Save the resolved credentials.
        self._credentials = credentials

        # Save the hostname, defaulting to port 443 (HTTPS) if none given.
        self._host = host if ":" in host else host + ":443"

    @property
    def host(self):
        """The target hostname, always including a port."""
        return self._host

    def _prep_wrapped_messages(self, client_info):
        """Precompute retry/timeout wrappers for every service RPC."""
        wrap = gapic_v1.method.wrap_method
        self._wrapped_methods = {
            self.update_serving_config: wrap(
                self.update_serving_config,
                default_timeout=None,
                client_info=client_info,
            ),
            self.get_serving_config: wrap(
                self.get_serving_config,
                default_timeout=None,
                client_info=client_info,
            ),
            self.list_serving_configs: wrap(
                self.list_serving_configs,
                default_timeout=None,
                client_info=client_info,
            ),
        }

    def close(self):
        """Closes resources associated with the transport.

        .. warning::
            Only call this method if the transport is NOT shared
            with other clients - this may cause errors in other clients!
        """
        raise NotImplementedError()
+ """ + raise NotImplementedError() + + @property + def update_serving_config( + self, + ) -> Callable[ + [serving_config_service.UpdateServingConfigRequest], + Union[ + gcd_serving_config.ServingConfig, + Awaitable[gcd_serving_config.ServingConfig], + ], + ]: + raise NotImplementedError() + + @property + def get_serving_config( + self, + ) -> Callable[ + [serving_config_service.GetServingConfigRequest], + Union[serving_config.ServingConfig, Awaitable[serving_config.ServingConfig]], + ]: + raise NotImplementedError() + + @property + def list_serving_configs( + self, + ) -> Callable[ + [serving_config_service.ListServingConfigsRequest], + Union[ + serving_config_service.ListServingConfigsResponse, + Awaitable[serving_config_service.ListServingConfigsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("ServingConfigServiceTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/transports/grpc.py new file mode 100644 index 000000000000..5a88faa286bc --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/transports/grpc.py @@ -0,0 +1,372 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may 
class ServingConfigServiceGrpcTransport(ServingConfigServiceTransport):
    """gRPC backend transport for ServingConfigService.

    Service for modifying ServingConfig.

    Defines the same methods as the primary client, so the primary client
    can load this transport implementation and call through it. Protocol
    buffers are sent over the wire using gRPC (built on top of HTTP/2);
    the ``grpcio`` package must be installed.
    """

    _stubs: Dict[str, Callable]

    def __init__(
        self,
        *,
        host: str = "discoveryengine.googleapis.com",
        credentials: Optional[ga_credentials.Credentials] = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        channel: Optional[grpc.Channel] = None,
        api_mtls_endpoint: Optional[str] = None,
        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
        quota_project_id: Optional[str] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
        always_use_jwt_access: Optional[bool] = False,
        api_audience: Optional[str] = None,
    ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]): Hostname to connect to (default:
                'discoveryengine.googleapis.com').
            credentials (Optional[google.auth.credentials.Credentials]):
                Credentials identifying the application; ascertained from
                the environment if omitted. Ignored when ``channel`` is
                provided.
            credentials_file (Optional[str]): Credentials file; ignored if
                ``channel`` is provided.
            scopes (Optional(Sequence[str])): A list of scopes; ignored if
                ``channel`` is provided.
            channel (Optional[grpc.Channel]): An existing ``Channel``
                through which to make calls.
            api_mtls_endpoint (Optional[str]): Deprecated. Mutual-TLS
                endpoint; overrides ``host`` and tries to create a mutual
                TLS channel.
            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
                Deprecated. Callback providing client SSL certificate and
                private key bytes (PEM); ignored when ``api_mtls_endpoint``
                is None.
            ssl_channel_credentials (grpc.ChannelCredentials): SSL
                credentials for the grpc channel; ignored if ``channel``
                is provided.
            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
                Callback providing client certificate and private key bytes
                (PEM) for mutual TLS; ignored when ``channel`` or
                ``ssl_channel_credentials`` is provided.
            quota_project_id (Optional[str]): An optional project to use
                for billing and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                Client info sent as part of the user-agent string.
            always_use_jwt_access (Optional[bool]): Whether self-signed JWT
                should be used for service account credentials.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS
                transport creation failed for any reason.
            google.api_core.exceptions.DuplicateCredentialArgs: If both
                ``credentials`` and ``credentials_file`` are passed.
        """
        self._grpc_channel = None
        self._ssl_channel_credentials = ssl_channel_credentials
        self._stubs: Dict[str, Callable] = {}

        if api_mtls_endpoint:
            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
        if client_cert_source:
            warnings.warn("client_cert_source is deprecated", DeprecationWarning)

        if channel:
            # A caller-supplied channel wins outright; disable credentials
            # so the base class does not try to resolve any.
            credentials = False
            self._grpc_channel = channel
            self._ssl_channel_credentials = None
        else:
            if api_mtls_endpoint:
                host = api_mtls_endpoint

                # SSL credentials come from client_cert_source when given,
                # otherwise from application default SSL credentials.
                if client_cert_source:
                    cert, key = client_cert_source()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )
                else:
                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
            else:
                if client_cert_source_for_mtls and not ssl_channel_credentials:
                    cert, key = client_cert_source_for_mtls()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )

        # The base transport resolves host, credentials and scopes.
        super().__init__(
            host=host,
            credentials=credentials,
            credentials_file=credentials_file,
            scopes=scopes,
            quota_project_id=quota_project_id,
            client_info=client_info,
            always_use_jwt_access=always_use_jwt_access,
            api_audience=api_audience,
        )

        if not self._grpc_channel:
            self._grpc_channel = type(self).create_channel(
                self._host,
                # Use the credentials saved by the base class; any file has
                # already been loaded, so pass None for credentials_file.
                credentials=self._credentials,
                credentials_file=None,
                scopes=self._scopes,
                ssl_credentials=self._ssl_channel_credentials,
                quota_project_id=quota_project_id,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )

        # Wrap messages. This must be done after self._grpc_channel exists.
        self._prep_wrapped_messages(client_info)
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def update_serving_config( + self, + ) -> Callable[ + [serving_config_service.UpdateServingConfigRequest], + gcd_serving_config.ServingConfig, + ]: + r"""Return a callable for the update serving config method over gRPC. + + Updates a ServingConfig. + + Returns a NOT_FOUND error if the ServingConfig does not exist. + + Returns: + Callable[[~.UpdateServingConfigRequest], + ~.ServingConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_serving_config" not in self._stubs: + self._stubs["update_serving_config"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.ServingConfigService/UpdateServingConfig", + request_serializer=serving_config_service.UpdateServingConfigRequest.serialize, + response_deserializer=gcd_serving_config.ServingConfig.deserialize, + ) + return self._stubs["update_serving_config"] + + @property + def get_serving_config( + self, + ) -> Callable[ + [serving_config_service.GetServingConfigRequest], serving_config.ServingConfig + ]: + r"""Return a callable for the get serving config method over gRPC. + + Gets a ServingConfig. + + Returns a NotFound error if the ServingConfig does not + exist. + + Returns: + Callable[[~.GetServingConfigRequest], + ~.ServingConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_serving_config" not in self._stubs: + self._stubs["get_serving_config"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.ServingConfigService/GetServingConfig", + request_serializer=serving_config_service.GetServingConfigRequest.serialize, + response_deserializer=serving_config.ServingConfig.deserialize, + ) + return self._stubs["get_serving_config"] + + @property + def list_serving_configs( + self, + ) -> Callable[ + [serving_config_service.ListServingConfigsRequest], + serving_config_service.ListServingConfigsResponse, + ]: + r"""Return a callable for the list serving configs method over gRPC. + + Lists all ServingConfigs linked to this dataStore. + + Returns: + Callable[[~.ListServingConfigsRequest], + ~.ListServingConfigsResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_serving_configs" not in self._stubs: + self._stubs["list_serving_configs"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.ServingConfigService/ListServingConfigs", + request_serializer=serving_config_service.ListServingConfigsRequest.serialize, + response_deserializer=serving_config_service.ListServingConfigsResponse.deserialize, + ) + return self._stubs["list_serving_configs"] + + def close(self): + self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("ServingConfigServiceGrpcTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..0f7a7d32150e --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/transports/grpc_asyncio.py @@ -0,0 +1,372 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.discoveryengine_v1beta.types import ( + serving_config as gcd_serving_config, +) +from google.cloud.discoveryengine_v1beta.types import serving_config +from google.cloud.discoveryengine_v1beta.types import serving_config_service + +from .base import DEFAULT_CLIENT_INFO, ServingConfigServiceTransport +from .grpc import ServingConfigServiceGrpcTransport + + +class ServingConfigServiceGrpcAsyncIOTransport(ServingConfigServiceTransport): + """gRPC AsyncIO backend transport for ServingConfigService. + + Service for modifying ServingConfig. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. 
If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def update_serving_config( + self, + ) -> Callable[ + [serving_config_service.UpdateServingConfigRequest], + Awaitable[gcd_serving_config.ServingConfig], + ]: + r"""Return a callable for the update serving config method over gRPC. + + Updates a ServingConfig. 
+ + Returns a NOT_FOUND error if the ServingConfig does not exist. + + Returns: + Callable[[~.UpdateServingConfigRequest], + Awaitable[~.ServingConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_serving_config" not in self._stubs: + self._stubs["update_serving_config"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.ServingConfigService/UpdateServingConfig", + request_serializer=serving_config_service.UpdateServingConfigRequest.serialize, + response_deserializer=gcd_serving_config.ServingConfig.deserialize, + ) + return self._stubs["update_serving_config"] + + @property + def get_serving_config( + self, + ) -> Callable[ + [serving_config_service.GetServingConfigRequest], + Awaitable[serving_config.ServingConfig], + ]: + r"""Return a callable for the get serving config method over gRPC. + + Gets a ServingConfig. + + Returns a NotFound error if the ServingConfig does not + exist. + + Returns: + Callable[[~.GetServingConfigRequest], + Awaitable[~.ServingConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_serving_config" not in self._stubs: + self._stubs["get_serving_config"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.ServingConfigService/GetServingConfig", + request_serializer=serving_config_service.GetServingConfigRequest.serialize, + response_deserializer=serving_config.ServingConfig.deserialize, + ) + return self._stubs["get_serving_config"] + + @property + def list_serving_configs( + self, + ) -> Callable[ + [serving_config_service.ListServingConfigsRequest], + Awaitable[serving_config_service.ListServingConfigsResponse], + ]: + r"""Return a callable for the list serving configs method over gRPC. + + Lists all ServingConfigs linked to this dataStore. + + Returns: + Callable[[~.ListServingConfigsRequest], + Awaitable[~.ListServingConfigsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_serving_configs" not in self._stubs: + self._stubs["list_serving_configs"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.ServingConfigService/ListServingConfigs", + request_serializer=serving_config_service.ListServingConfigsRequest.serialize, + response_deserializer=serving_config_service.ListServingConfigsResponse.deserialize, + ) + return self._stubs["list_serving_configs"] + + def close(self): + return self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("ServingConfigServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/transports/rest.py new file mode 100644 index 000000000000..b56d37ebdc6f --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/transports/rest.py @@ -0,0 +1,912 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.discoveryengine_v1beta.types import ( + serving_config as gcd_serving_config, +) +from google.cloud.discoveryengine_v1beta.types import serving_config +from google.cloud.discoveryengine_v1beta.types import serving_config_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import ServingConfigServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class 
ServingConfigServiceRestInterceptor: + """Interceptor for ServingConfigService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ServingConfigServiceRestTransport. + + .. code-block:: python + class MyCustomServingConfigServiceInterceptor(ServingConfigServiceRestInterceptor): + def pre_get_serving_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_serving_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_serving_configs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_serving_configs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_serving_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_serving_config(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ServingConfigServiceRestTransport(interceptor=MyCustomServingConfigServiceInterceptor()) + client = ServingConfigServiceClient(transport=transport) + + + """ + + def pre_get_serving_config( + self, + request: serving_config_service.GetServingConfigRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + serving_config_service.GetServingConfigRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_serving_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServingConfigService server. 
+ """ + return request, metadata + + def post_get_serving_config( + self, response: serving_config.ServingConfig + ) -> serving_config.ServingConfig: + """Post-rpc interceptor for get_serving_config + + Override in a subclass to manipulate the response + after it is returned by the ServingConfigService server but before + it is returned to user code. + """ + return response + + def pre_list_serving_configs( + self, + request: serving_config_service.ListServingConfigsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + serving_config_service.ListServingConfigsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_serving_configs + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServingConfigService server. + """ + return request, metadata + + def post_list_serving_configs( + self, response: serving_config_service.ListServingConfigsResponse + ) -> serving_config_service.ListServingConfigsResponse: + """Post-rpc interceptor for list_serving_configs + + Override in a subclass to manipulate the response + after it is returned by the ServingConfigService server but before + it is returned to user code. + """ + return response + + def pre_update_serving_config( + self, + request: serving_config_service.UpdateServingConfigRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + serving_config_service.UpdateServingConfigRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for update_serving_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServingConfigService server. + """ + return request, metadata + + def post_update_serving_config( + self, response: gcd_serving_config.ServingConfig + ) -> gcd_serving_config.ServingConfig: + """Post-rpc interceptor for update_serving_config + + Override in a subclass to manipulate the response + after it is returned by the ServingConfigService server but before + it is returned to user code. 
+ """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServingConfigService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the ServingConfigService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServingConfigService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the ServingConfigService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ServingConfigServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ServingConfigServiceRestInterceptor + + +class ServingConfigServiceRestTransport(ServingConfigServiceTransport): + """REST backend transport for ServingConfigService. + + Service for modifying ServingConfig. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[ServingConfigServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ServingConfigServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _GetServingConfig(ServingConfigServiceRestStub): + def __hash__(self): + return hash("GetServingConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: serving_config_service.GetServingConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> serving_config.ServingConfig: + r"""Call the get serving config method over HTTP. 

            Args:
                request (~.serving_config_service.GetServingConfigRequest):
                    The request object. Request for GetServingConfig method.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.serving_config.ServingConfig:
                    Configures metadata that is used to
                    generate serving time results (e.g.
                    search results or recommendation
                    predictions). The ServingConfig is
                    passed in the search and predict request
                    and generates results.

            """

            # Every URI binding this RPC can transcode to; ``path_template``
            # selects the variant whose ``{name=...}`` pattern matches the request.
            http_options: List[Dict[str, str]] = [
                {
                    "method": "get",
                    "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/servingConfigs/*}",
                },
                {
                    "method": "get",
                    "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/servingConfigs/*}",
                },
                {
                    "method": "get",
                    "uri": "/v1beta/{name=projects/*/locations/*/collections/*/engines/*/servingConfigs/*}",
                },
            ]
            request, metadata = self._interceptor.pre_get_serving_config(
                request, metadata
            )
            pb_request = serving_config_service.GetServingConfigRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request["uri"]
            method = transcoded_request["method"]

            # Jsonify the query params
            query_params = json.loads(
                json_format.MessageToJson(
                    transcoded_request["query_params"],
                    including_default_value_fields=False,
                    use_integers_for_enums=True,
                )
            )
            query_params.update(self._get_unset_required_fields(query_params))

            query_params["$alt"] = "json;enum-encoding=int"

            # Send the request
            headers = dict(metadata)
            headers["Content-Type"] = "application/json"
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            # NOTE(review): Parse fills ``pb_resp``, the protobuf backing
            # ``resp``, so ``resp`` is populated in place before interception.
            resp = serving_config.ServingConfig()
            pb_resp = serving_config.ServingConfig.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_get_serving_config(resp)
            return resp

    class _ListServingConfigs(ServingConfigServiceRestStub):
        def __hash__(self):
            return hash("ListServingConfigs")

        # Empty: this RPC has no required fields whose defaults must be
        # injected into the query string, so _get_unset_required_fields
        # contributes nothing here.
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {
                k: v
                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
                if k not in message_dict
            }

        def __call__(
            self,
            request: serving_config_service.ListServingConfigsRequest,
            *,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Optional[float] = None,
            metadata: Sequence[Tuple[str, str]] = (),
        ) -> serving_config_service.ListServingConfigsResponse:
            r"""Call the list serving configs method over HTTP.

            Args:
                request (~.serving_config_service.ListServingConfigsRequest):
                    The request object. Request for ListServingConfigs
                    method.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.serving_config_service.ListServingConfigsResponse:
                    Response for ListServingConfigs
                    method.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{parent=projects/*/locations/*/dataStores/*}/servingConfigs", + }, + { + "method": "get", + "uri": "/v1beta/{parent=projects/*/locations/*/collections/*/dataStores/*}/servingConfigs", + }, + { + "method": "get", + "uri": "/v1beta/{parent=projects/*/locations/*/collections/*/engines/*}/servingConfigs", + }, + ] + request, metadata = self._interceptor.pre_list_serving_configs( + request, metadata + ) + pb_request = serving_config_service.ListServingConfigsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = serving_config_service.ListServingConfigsResponse() + pb_resp = serving_config_service.ListServingConfigsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_serving_configs(resp) + return resp + + class _UpdateServingConfig(ServingConfigServiceRestStub): + def __hash__(self): + return hash("UpdateServingConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: serving_config_service.UpdateServingConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_serving_config.ServingConfig: + r"""Call the update serving config method over HTTP. + + Args: + request (~.serving_config_service.UpdateServingConfigRequest): + The request object. Request for UpdateServingConfig + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcd_serving_config.ServingConfig: + Configures metadata that is used to + generate serving time results (e.g. + search results or recommendation + predictions). The ServingConfig is + passed in the search and predict request + and generates results. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1beta/{serving_config.name=projects/*/locations/*/dataStores/*/servingConfigs/*}", + "body": "serving_config", + }, + { + "method": "patch", + "uri": "/v1beta/{serving_config.name=projects/*/locations/*/collections/*/dataStores/*/servingConfigs/*}", + "body": "serving_config", + }, + { + "method": "patch", + "uri": "/v1beta/{serving_config.name=projects/*/locations/*/collections/*/engines/*/servingConfigs/*}", + "body": "serving_config", + }, + ] + request, metadata = self._interceptor.pre_update_serving_config( + request, metadata + ) + pb_request = serving_config_service.UpdateServingConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcd_serving_config.ServingConfig() + pb_resp = gcd_serving_config.ServingConfig.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_serving_config(resp) + return resp + + @property + def get_serving_config( + self, + ) -> Callable[ + [serving_config_service.GetServingConfigRequest], serving_config.ServingConfig + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetServingConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_serving_configs( + self, + ) -> Callable[ + [serving_config_service.ListServingConfigsRequest], + serving_config_service.ListServingConfigsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListServingConfigs(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_serving_config( + self, + ) -> Callable[ + [serving_config_service.UpdateServingConfigRequest], + gcd_serving_config.ServingConfig, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
        # In C++ this would require a dynamic_cast
        return self._UpdateServingConfig(self._session, self._host, self._interceptor)  # type: ignore

    @property
    def get_operation(self):
        # Returns a callable stub bound to this transport's session/interceptor.
        return self._GetOperation(self._session, self._host, self._interceptor)  # type: ignore

    class _GetOperation(ServingConfigServiceRestStub):
        def __call__(
            self,
            request: operations_pb2.GetOperationRequest,
            *,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Optional[float] = None,
            metadata: Sequence[Tuple[str, str]] = (),
        ) -> operations_pb2.Operation:
            r"""Call the get operation method over HTTP.

            Args:
                request (operations_pb2.GetOperationRequest):
                    The request object for GetOperation method.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                operations_pb2.Operation: Response from GetOperation method.
            """

            # One binding per resource hierarchy that can own a long-running
            # operation; transcode matches the operation ``name`` against these.
            http_options: List[Dict[str, str]] = [
                {
                    "method": "get",
                    "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}",
                },
                {
                    "method": "get",
                    "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}",
                },
                {
                    "method": "get",
                    "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}",
                },
                {
                    "method": "get",
                    "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}",
                },
                {
                    "method": "get",
                    "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}",
                },
                {
                    "method": "get",
                    "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/operations/*}",
                },
                {
                    "method": "get",
                    "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/operations/*}",
                },
                {
                    "method": "get",
                    "uri": "/v1beta/{name=projects/*/locations/*/collections/*/engines/*/operations/*}",
                },
                {
                    "method": "get",
                    "uri": "/v1beta/{name=projects/*/locations/*/collections/*/operations/*}",
                },
                {
                    "method": "get",
                    "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}",
                },
                {
                    "method": "get",
                    "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}",
                },
                {
                    "method": "get",
                    "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/operations/*}",
                },
                {
                    "method": "get",
                    "uri": "/v1beta/{name=projects/*/locations/*/operations/*}",
                },
                {
                    "method": "get",
                    "uri": "/v1beta/{name=projects/*/operations/*}",
                },
            ]

            request, metadata = self._interceptor.pre_get_operation(request, metadata)
            request_kwargs = json_format.MessageToDict(request)
            transcoded_request = path_template.transcode(http_options, **request_kwargs)

            uri = transcoded_request["uri"]
            method = transcoded_request["method"]

            # Jsonify the query params
            query_params = json.loads(json.dumps(transcoded_request["query_params"]))

            # Send the request
            headers = dict(metadata)
            headers["Content-Type"] = "application/json"

            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params),
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Unlike the service RPCs above, operations_pb2 messages are plain
            # protobufs, so the decoded JSON is parsed directly into the message.
            resp = operations_pb2.Operation()
            resp = json_format.Parse(response.content.decode("utf-8"), resp)
            resp = self._interceptor.post_get_operation(resp)
            return resp

    @property
    def list_operations(self):
        # Returns a callable stub bound to this transport's session/interceptor.
        return self._ListOperations(self._session, self._host, self._interceptor)  # type: ignore

    class _ListOperations(ServingConfigServiceRestStub):
        def __call__(
            self,
            request: operations_pb2.ListOperationsRequest,
            *,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Optional[float] = None,
            metadata: Sequence[Tuple[str, str]] = (),
        ) -> operations_pb2.ListOperationsResponse:
            r"""Call the list operations method over HTTP.

            Args:
                request (operations_pb2.ListOperationsRequest):
                    The request object for ListOperations method.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                operations_pb2.ListOperationsResponse: Response from ListOperations method.
            """

            # One binding per resource hierarchy that can own operations;
            # transcode matches the parent ``name`` against these patterns.
            http_options: List[Dict[str, str]] = [
                {
                    "method": "get",
                    "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector}/operations",
                },
                {
                    "method": "get",
                    "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations",
                },
                {
                    "method": "get",
                    "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations",
                },
                {
                    "method": "get",
                    "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations",
                },
                {
                    "method": "get",
                    "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites}/operations",
                },
                {
                    "method": "get",
                    "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/operations",
                },
                {
                    "method": "get",
                    "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*}/operations",
                },
                {
                    "method": "get",
                    "uri": "/v1beta/{name=projects/*/locations/*/collections/*/engines/*}/operations",
                },
                {
                    "method": "get",
                    "uri": "/v1beta/{name=projects/*/locations/*/collections/*}/operations",
                },
                {
                    "method": "get",
                    "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*}/operations",
                },
                {
                    "method": "get",
                    "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/models/*}/operations",
                },
                {
                    "method": "get",
                    "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*}/operations",
                },
                {
                    "method": "get",
                    "uri": "/v1beta/{name=projects/*/locations/*}/operations",
                },
                {
                    "method": "get",
                    "uri": "/v1beta/{name=projects/*}/operations",
                },
            ]

            request, metadata = self._interceptor.pre_list_operations(request, metadata)
            request_kwargs = json_format.MessageToDict(request)
            transcoded_request = path_template.transcode(http_options, **request_kwargs)

            uri = transcoded_request["uri"]
            method = transcoded_request["method"]

            # Jsonify the query params
            query_params = 
json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("ServingConfigServiceRestTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/__init__.py new file mode 100644 index 000000000000..e99e4a4e4ff5 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import SiteSearchEngineServiceAsyncClient +from .client import SiteSearchEngineServiceClient + +__all__ = ( + "SiteSearchEngineServiceClient", + "SiteSearchEngineServiceAsyncClient", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/async_client.py new file mode 100644 index 000000000000..6fa9b4bd4c16 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/async_client.py @@ -0,0 +1,1823 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.discoveryengine_v1beta.services.site_search_engine_service import ( + pagers, +) +from google.cloud.discoveryengine_v1beta.types import ( + site_search_engine, + site_search_engine_service, +) + +from .client import SiteSearchEngineServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, SiteSearchEngineServiceTransport +from .transports.grpc_asyncio import SiteSearchEngineServiceGrpcAsyncIOTransport + + +class SiteSearchEngineServiceAsyncClient: + """Service for managing site search related resources.""" + + _client: SiteSearchEngineServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = SiteSearchEngineServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = SiteSearchEngineServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ( + SiteSearchEngineServiceClient._DEFAULT_ENDPOINT_TEMPLATE + ) + _DEFAULT_UNIVERSE = SiteSearchEngineServiceClient._DEFAULT_UNIVERSE + + site_search_engine_path = staticmethod( + SiteSearchEngineServiceClient.site_search_engine_path + ) + parse_site_search_engine_path = staticmethod( + SiteSearchEngineServiceClient.parse_site_search_engine_path + ) + target_site_path = staticmethod(SiteSearchEngineServiceClient.target_site_path) + parse_target_site_path = staticmethod( + SiteSearchEngineServiceClient.parse_target_site_path + ) + common_billing_account_path = staticmethod( + SiteSearchEngineServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + SiteSearchEngineServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(SiteSearchEngineServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + SiteSearchEngineServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + SiteSearchEngineServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + SiteSearchEngineServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + SiteSearchEngineServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + SiteSearchEngineServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + SiteSearchEngineServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + SiteSearchEngineServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. 
+ args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SiteSearchEngineServiceAsyncClient: The constructed client. + """ + return SiteSearchEngineServiceClient.from_service_account_info.__func__(SiteSearchEngineServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SiteSearchEngineServiceAsyncClient: The constructed client. + """ + return SiteSearchEngineServiceClient.from_service_account_file.__func__(SiteSearchEngineServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. 
+ + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return SiteSearchEngineServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> SiteSearchEngineServiceTransport: + """Returns the transport used by the client instance. + + Returns: + SiteSearchEngineServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = functools.partial( + type(SiteSearchEngineServiceClient).get_transport_class, + type(SiteSearchEngineServiceClient), + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, SiteSearchEngineServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the site search engine service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.SiteSearchEngineServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = SiteSearchEngineServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_site_search_engine( + self, + request: Optional[ + Union[site_search_engine_service.GetSiteSearchEngineRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> site_search_engine.SiteSearchEngine: + r"""Gets the + [SiteSearchEngine][google.cloud.discoveryengine.v1beta.SiteSearchEngine]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_get_site_search_engine(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.GetSiteSearchEngineRequest( + name="name_value", + ) + + # Make the request + response = await client.get_site_search_engine(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.GetSiteSearchEngineRequest, dict]]): + The request object. Request message for + [SiteSearchEngineService.GetSiteSearchEngine][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.GetSiteSearchEngine] + method. + name (:class:`str`): + Required. 
Resource name of + [SiteSearchEngine][google.cloud.discoveryengine.v1beta.SiteSearchEngine], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine``. + + If the caller does not have permission to access the + [SiteSearchEngine], regardless of whether or not it + exists, a PERMISSION_DENIED error is returned. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.types.SiteSearchEngine: + SiteSearchEngine captures DataStore + level site search persisting + configurations. It is a singleton value + per data store. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = site_search_engine_service.GetSiteSearchEngineRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_site_search_engine, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_target_site( + self, + request: Optional[ + Union[site_search_engine_service.CreateTargetSiteRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + target_site: Optional[site_search_engine.TargetSite] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_create_target_site(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + target_site = discoveryengine_v1beta.TargetSite() + target_site.provided_uri_pattern = "provided_uri_pattern_value" + + request = discoveryengine_v1beta.CreateTargetSiteRequest( + parent="parent_value", + target_site=target_site, + ) + + # Make the request + operation = client.create_target_site(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.CreateTargetSiteRequest, dict]]): + The request object. Request message for + [SiteSearchEngineService.CreateTargetSite][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.CreateTargetSite] + method. + parent (:class:`str`): + Required. Parent resource name of + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_site (:class:`google.cloud.discoveryengine_v1beta.types.TargetSite`): + Required. The + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite] + to create. + + This corresponds to the ``target_site`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.discoveryengine_v1beta.types.TargetSite` + A target site for the SiteSearchEngine. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, target_site]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = site_search_engine_service.CreateTargetSiteRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if target_site is not None: + request.target_site = target_site + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_target_site, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + site_search_engine.TargetSite, + metadata_type=site_search_engine_service.CreateTargetSiteMetadata, + ) + + # Done; return the response. 
+ return response + + async def batch_create_target_sites( + self, + request: Optional[ + Union[site_search_engine_service.BatchCreateTargetSitesRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite] in + a batch. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_batch_create_target_sites(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + requests = discoveryengine_v1beta.CreateTargetSiteRequest() + requests.parent = "parent_value" + requests.target_site.provided_uri_pattern = "provided_uri_pattern_value" + + request = discoveryengine_v1beta.BatchCreateTargetSitesRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + operation = client.batch_create_target_sites(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.BatchCreateTargetSitesRequest, dict]]): + The request object. Request message for + [SiteSearchEngineService.BatchCreateTargetSites][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.BatchCreateTargetSites] + method. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1beta.types.BatchCreateTargetSitesResponse` Response message for + [SiteSearchEngineService.BatchCreateTargetSites][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.BatchCreateTargetSites] + method. + + """ + # Create or coerce a protobuf request object. + request = site_search_engine_service.BatchCreateTargetSitesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.batch_create_target_sites, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + site_search_engine_service.BatchCreateTargetSitesResponse, + metadata_type=site_search_engine_service.BatchCreateTargetSiteMetadata, + ) + + # Done; return the response. 
+ return response + + async def get_target_site( + self, + request: Optional[ + Union[site_search_engine_service.GetTargetSiteRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> site_search_engine.TargetSite: + r"""Gets a + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_get_target_site(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.GetTargetSiteRequest( + name="name_value", + ) + + # Make the request + response = await client.get_target_site(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.GetTargetSiteRequest, dict]]): + The request object. Request message for + [SiteSearchEngineService.GetTargetSite][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.GetTargetSite] + method. + name (:class:`str`): + Required. Full resource name of + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine/targetSites/{target_site}``. 
+ + If the caller does not have permission to access the + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the requested + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite] + does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.types.TargetSite: + A target site for the + SiteSearchEngine. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = site_search_engine_service.GetTargetSiteRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_target_site, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_target_site( + self, + request: Optional[ + Union[site_search_engine_service.UpdateTargetSiteRequest, dict] + ] = None, + *, + target_site: Optional[site_search_engine.TargetSite] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_update_target_site(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + target_site = discoveryengine_v1beta.TargetSite() + target_site.provided_uri_pattern = "provided_uri_pattern_value" + + request = discoveryengine_v1beta.UpdateTargetSiteRequest( + target_site=target_site, + ) + + # Make the request + operation = client.update_target_site(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.UpdateTargetSiteRequest, dict]]): + The request object. Request message for + [SiteSearchEngineService.UpdateTargetSite][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.UpdateTargetSite] + method. 
+ target_site (:class:`google.cloud.discoveryengine_v1beta.types.TargetSite`): + Required. The target site to update. If the caller does + not have permission to update the + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite] + to update does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``target_site`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.discoveryengine_v1beta.types.TargetSite` + A target site for the SiteSearchEngine. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([target_site]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = site_search_engine_service.UpdateTargetSiteRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if target_site is not None: + request.target_site = target_site + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_target_site, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("target_site.name", request.target_site.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + site_search_engine.TargetSite, + metadata_type=site_search_engine_service.UpdateTargetSiteMetadata, + ) + + # Done; return the response. + return response + + async def delete_target_site( + self, + request: Optional[ + Union[site_search_engine_service.DeleteTargetSiteRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_delete_target_site(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.DeleteTargetSiteRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_target_site(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.DeleteTargetSiteRequest, dict]]): + The request object. Request message for + [SiteSearchEngineService.DeleteTargetSite][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.DeleteTargetSite] + method. + name (:class:`str`): + Required. Full resource name of + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine/targetSites/{target_site}``. + + If the caller does not have permission to access the + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the requested + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite] + does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = site_search_engine_service.DeleteTargetSiteRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_target_site, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=site_search_engine_service.DeleteTargetSiteMetadata, + ) + + # Done; return the response. + return response + + async def list_target_sites( + self, + request: Optional[ + Union[site_search_engine_service.ListTargetSitesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTargetSitesAsyncPager: + r"""Gets a list of + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite]s. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_list_target_sites(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.ListTargetSitesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_target_sites(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.ListTargetSitesRequest, dict]]): + The request object. Request message for + [SiteSearchEngineService.ListTargetSites][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.ListTargetSites] + method. + parent (:class:`str`): + Required. 
The parent site search engine resource name, + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine``. + + If the caller does not have permission to list + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite]s + under this site search engine, regardless of whether or + not this branch exists, a PERMISSION_DENIED error is + returned. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.services.site_search_engine_service.pagers.ListTargetSitesAsyncPager: + Response message for + [SiteSearchEngineService.ListTargetSites][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.ListTargetSites] + method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = site_search_engine_service.ListTargetSitesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_target_sites, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTargetSitesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def enable_advanced_site_search( + self, + request: Optional[ + Union[site_search_engine_service.EnableAdvancedSiteSearchRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Upgrade from basic site search to advanced site + search. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_enable_advanced_site_search(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.EnableAdvancedSiteSearchRequest( + site_search_engine="site_search_engine_value", + ) + + # Make the request + operation = client.enable_advanced_site_search(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.EnableAdvancedSiteSearchRequest, dict]]): + The request object. Request message for + [SiteSearchEngineService.EnableAdvancedSiteSearch][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.EnableAdvancedSiteSearch] + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1beta.types.EnableAdvancedSiteSearchResponse` Response message for + [SiteSearchEngineService.EnableAdvancedSiteSearch][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.EnableAdvancedSiteSearch] + method. + + """ + # Create or coerce a protobuf request object. + request = site_search_engine_service.EnableAdvancedSiteSearchRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.enable_advanced_site_search, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("site_search_engine", request.site_search_engine),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + site_search_engine_service.EnableAdvancedSiteSearchResponse, + metadata_type=site_search_engine_service.EnableAdvancedSiteSearchMetadata, + ) + + # Done; return the response. + return response + + async def disable_advanced_site_search( + self, + request: Optional[ + Union[site_search_engine_service.DisableAdvancedSiteSearchRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Downgrade from advanced site search to basic site + search. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_disable_advanced_site_search(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.DisableAdvancedSiteSearchRequest( + site_search_engine="site_search_engine_value", + ) + + # Make the request + operation = client.disable_advanced_site_search(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.DisableAdvancedSiteSearchRequest, dict]]): + The request object. Request message for + [SiteSearchEngineService.DisableAdvancedSiteSearch][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.DisableAdvancedSiteSearch] + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1beta.types.DisableAdvancedSiteSearchResponse` Response message for + [SiteSearchEngineService.DisableAdvancedSiteSearch][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.DisableAdvancedSiteSearch] + method. + + """ + # Create or coerce a protobuf request object. + request = site_search_engine_service.DisableAdvancedSiteSearchRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.disable_advanced_site_search, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("site_search_engine", request.site_search_engine),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + site_search_engine_service.DisableAdvancedSiteSearchResponse, + metadata_type=site_search_engine_service.DisableAdvancedSiteSearchMetadata, + ) + + # Done; return the response. + return response + + async def recrawl_uris( + self, + request: Optional[ + Union[site_search_engine_service.RecrawlUrisRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Request on-demand recrawl for a list of URIs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_recrawl_uris(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.RecrawlUrisRequest( + site_search_engine="site_search_engine_value", + uris=['uris_value1', 'uris_value2'], + ) + + # Make the request + operation = client.recrawl_uris(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.RecrawlUrisRequest, dict]]): + The request object. Request message for + [SiteSearchEngineService.RecrawlUris][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.RecrawlUris] + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1beta.types.RecrawlUrisResponse` Response message for + [SiteSearchEngineService.RecrawlUris][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.RecrawlUris] + method. + + """ + # Create or coerce a protobuf request object. + request = site_search_engine_service.RecrawlUrisRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.recrawl_uris, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("site_search_engine", request.site_search_engine),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + site_search_engine_service.RecrawlUrisResponse, + metadata_type=site_search_engine_service.RecrawlUrisMetadata, + ) + + # Done; return the response. + return response + + async def batch_verify_target_sites( + self, + request: Optional[ + Union[site_search_engine_service.BatchVerifyTargetSitesRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Verify target sites' ownership and validity. + This API sends all the target sites under site search + engine for verification. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_batch_verify_target_sites(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.BatchVerifyTargetSitesRequest( + parent="parent_value", + ) + + # Make the request + operation = client.batch_verify_target_sites(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.BatchVerifyTargetSitesRequest, dict]]): + The request object. Request message for + [SiteSearchEngineService.BatchVerifyTargetSites][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.BatchVerifyTargetSites] + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1beta.types.BatchVerifyTargetSitesResponse` Response message for + [SiteSearchEngineService.BatchVerifyTargetSites][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.BatchVerifyTargetSites] + method. + + """ + # Create or coerce a protobuf request object. + request = site_search_engine_service.BatchVerifyTargetSitesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.batch_verify_target_sites, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + site_search_engine_service.BatchVerifyTargetSitesResponse, + metadata_type=site_search_engine_service.BatchVerifyTargetSitesMetadata, + ) + + # Done; return the response. + return response + + async def fetch_domain_verification_status( + self, + request: Optional[ + Union[site_search_engine_service.FetchDomainVerificationStatusRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.FetchDomainVerificationStatusAsyncPager: + r"""Returns list of target sites with its domain verification + status. This method can only be called under data store with + BASIC_SITE_SEARCH state at the moment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_fetch_domain_verification_status(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.FetchDomainVerificationStatusRequest( + site_search_engine="site_search_engine_value", + ) + + # Make the request + page_result = client.fetch_domain_verification_status(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.FetchDomainVerificationStatusRequest, dict]]): + The request object. Request message for + [SiteSearchEngineService.FetchDomainVerificationStatus][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.FetchDomainVerificationStatus] + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.services.site_search_engine_service.pagers.FetchDomainVerificationStatusAsyncPager: + Response message for + [SiteSearchEngineService.FetchDomainVerificationStatus][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.FetchDomainVerificationStatus] + method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + request = site_search_engine_service.FetchDomainVerificationStatusRequest( + request + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.fetch_domain_verification_status, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("site_search_engine", request.site_search_engine),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.FetchDomainVerificationStatusAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "SiteSearchEngineServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("SiteSearchEngineServiceAsyncClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/client.py new file mode 100644 index 000000000000..db33a85e8441 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/client.py @@ -0,0 +1,2288 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.discoveryengine_v1beta.services.site_search_engine_service import ( + pagers, +) +from google.cloud.discoveryengine_v1beta.types import ( + site_search_engine, + site_search_engine_service, +) + +from .transports.base import DEFAULT_CLIENT_INFO, SiteSearchEngineServiceTransport +from .transports.grpc import SiteSearchEngineServiceGrpcTransport +from .transports.grpc_asyncio import SiteSearchEngineServiceGrpcAsyncIOTransport +from .transports.rest import SiteSearchEngineServiceRestTransport + + +class SiteSearchEngineServiceClientMeta(type): + """Metaclass for the 
SiteSearchEngineService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[SiteSearchEngineServiceTransport]] + _transport_registry["grpc"] = SiteSearchEngineServiceGrpcTransport + _transport_registry["grpc_asyncio"] = SiteSearchEngineServiceGrpcAsyncIOTransport + _transport_registry["rest"] = SiteSearchEngineServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[SiteSearchEngineServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class SiteSearchEngineServiceClient(metaclass=SiteSearchEngineServiceClientMeta): + """Service for managing site search related resources.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "discoveryengine.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "discoveryengine.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SiteSearchEngineServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SiteSearchEngineServiceClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> SiteSearchEngineServiceTransport: + """Returns the transport used by the client instance. + + Returns: + SiteSearchEngineServiceTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def site_search_engine_path( + project: str, + location: str, + data_store: str, + ) -> str: + """Returns a fully-qualified site_search_engine string.""" + return "projects/{project}/locations/{location}/dataStores/{data_store}/siteSearchEngine".format( + project=project, + location=location, + data_store=data_store, + ) + + @staticmethod + def parse_site_search_engine_path(path: str) -> Dict[str, str]: + """Parses a site_search_engine path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/dataStores/(?P.+?)/siteSearchEngine$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def target_site_path( + project: str, + location: str, + data_store: str, + target_site: str, + ) -> str: + """Returns a fully-qualified target_site string.""" + return "projects/{project}/locations/{location}/dataStores/{data_store}/siteSearchEngine/targetSites/{target_site}".format( + project=project, + location=location, + data_store=data_store, + target_site=target_site, + ) + + @staticmethod + def parse_target_site_path(path: str) -> Dict[str, str]: + """Parses a target_site path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/dataStores/(?P.+?)/siteSearchEngine/targetSites/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return 
"billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = 
re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". 
+ + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = SiteSearchEngineServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = SiteSearchEngineServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ( + SiteSearchEngineServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = SiteSearchEngineServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. 
+ + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = SiteSearchEngineServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or SiteSearchEngineServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, SiteSearchEngineServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the site search engine service client. 
+ + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, SiteSearchEngineServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = SiteSearchEngineServiceClient._read_environment_variables() + self._client_cert_source = ( + SiteSearchEngineServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + ) + self._universe_domain = SiteSearchEngineServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, SiteSearchEngineServiceTransport) + if transport_provided: + # transport is a SiteSearchEngineServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." 
+ ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(SiteSearchEngineServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or SiteSearchEngineServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(cast(str, transport)) + self._transport = Transport( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def get_site_search_engine( + self, + request: Optional[ + Union[site_search_engine_service.GetSiteSearchEngineRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> site_search_engine.SiteSearchEngine: + r"""Gets the + [SiteSearchEngine][google.cloud.discoveryengine.v1beta.SiteSearchEngine]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_get_site_search_engine(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.GetSiteSearchEngineRequest( + name="name_value", + ) + + # Make the request + response = client.get_site_search_engine(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.GetSiteSearchEngineRequest, dict]): + The request object. Request message for + [SiteSearchEngineService.GetSiteSearchEngine][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.GetSiteSearchEngine] + method. + name (str): + Required. Resource name of + [SiteSearchEngine][google.cloud.discoveryengine.v1beta.SiteSearchEngine], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine``. + + If the caller does not have permission to access the + [SiteSearchEngine], regardless of whether or not it + exists, a PERMISSION_DENIED error is returned. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.types.SiteSearchEngine: + SiteSearchEngine captures DataStore + level site search persisting + configurations. It is a singleton value + per data store. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a site_search_engine_service.GetSiteSearchEngineRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, site_search_engine_service.GetSiteSearchEngineRequest + ): + request = site_search_engine_service.GetSiteSearchEngineRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_site_search_engine] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_target_site( + self, + request: Optional[ + Union[site_search_engine_service.CreateTargetSiteRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + target_site: Optional[site_search_engine.TargetSite] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite]. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_create_target_site(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + target_site = discoveryengine_v1beta.TargetSite() + target_site.provided_uri_pattern = "provided_uri_pattern_value" + + request = discoveryengine_v1beta.CreateTargetSiteRequest( + parent="parent_value", + target_site=target_site, + ) + + # Make the request + operation = client.create_target_site(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.CreateTargetSiteRequest, dict]): + The request object. Request message for + [SiteSearchEngineService.CreateTargetSite][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.CreateTargetSite] + method. + parent (str): + Required. Parent resource name of + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_site (google.cloud.discoveryengine_v1beta.types.TargetSite): + Required. The + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite] + to create. 
+ + This corresponds to the ``target_site`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.discoveryengine_v1beta.types.TargetSite` + A target site for the SiteSearchEngine. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, target_site]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a site_search_engine_service.CreateTargetSiteRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, site_search_engine_service.CreateTargetSiteRequest): + request = site_search_engine_service.CreateTargetSiteRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if target_site is not None: + request.target_site = target_site + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_target_site] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + site_search_engine.TargetSite, + metadata_type=site_search_engine_service.CreateTargetSiteMetadata, + ) + + # Done; return the response. + return response + + def batch_create_target_sites( + self, + request: Optional[ + Union[site_search_engine_service.BatchCreateTargetSitesRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite] in + a batch. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_batch_create_target_sites(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + requests = discoveryengine_v1beta.CreateTargetSiteRequest() + requests.parent = "parent_value" + requests.target_site.provided_uri_pattern = "provided_uri_pattern_value" + + request = discoveryengine_v1beta.BatchCreateTargetSitesRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + operation = client.batch_create_target_sites(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.BatchCreateTargetSitesRequest, dict]): + The request object. Request message for + [SiteSearchEngineService.BatchCreateTargetSites][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.BatchCreateTargetSites] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1beta.types.BatchCreateTargetSitesResponse` Response message for + [SiteSearchEngineService.BatchCreateTargetSites][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.BatchCreateTargetSites] + method. + + """ + # Create or coerce a protobuf request object. 
+ # Minor optimization to avoid making a copy if the user passes + # in a site_search_engine_service.BatchCreateTargetSitesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, site_search_engine_service.BatchCreateTargetSitesRequest + ): + request = site_search_engine_service.BatchCreateTargetSitesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.batch_create_target_sites + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + site_search_engine_service.BatchCreateTargetSitesResponse, + metadata_type=site_search_engine_service.BatchCreateTargetSiteMetadata, + ) + + # Done; return the response. + return response + + def get_target_site( + self, + request: Optional[ + Union[site_search_engine_service.GetTargetSiteRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> site_search_engine.TargetSite: + r"""Gets a + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_get_target_site(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.GetTargetSiteRequest( + name="name_value", + ) + + # Make the request + response = client.get_target_site(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.GetTargetSiteRequest, dict]): + The request object. Request message for + [SiteSearchEngineService.GetTargetSite][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.GetTargetSite] + method. + name (str): + Required. Full resource name of + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine/targetSites/{target_site}``. + + If the caller does not have permission to access the + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the requested + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite] + does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.types.TargetSite: + A target site for the + SiteSearchEngine. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a site_search_engine_service.GetTargetSiteRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, site_search_engine_service.GetTargetSiteRequest): + request = site_search_engine_service.GetTargetSiteRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_target_site] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_target_site( + self, + request: Optional[ + Union[site_search_engine_service.UpdateTargetSiteRequest, dict] + ] = None, + *, + target_site: Optional[site_search_engine.TargetSite] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates a + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite]. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_update_target_site(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + target_site = discoveryengine_v1beta.TargetSite() + target_site.provided_uri_pattern = "provided_uri_pattern_value" + + request = discoveryengine_v1beta.UpdateTargetSiteRequest( + target_site=target_site, + ) + + # Make the request + operation = client.update_target_site(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.UpdateTargetSiteRequest, dict]): + The request object. Request message for + [SiteSearchEngineService.UpdateTargetSite][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.UpdateTargetSite] + method. + target_site (google.cloud.discoveryengine_v1beta.types.TargetSite): + Required. The target site to update. If the caller does + not have permission to update the + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite] + to update does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``target_site`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.discoveryengine_v1beta.types.TargetSite` + A target site for the SiteSearchEngine. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([target_site]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a site_search_engine_service.UpdateTargetSiteRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, site_search_engine_service.UpdateTargetSiteRequest): + request = site_search_engine_service.UpdateTargetSiteRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if target_site is not None: + request.target_site = target_site + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_target_site] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("target_site.name", request.target_site.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + site_search_engine.TargetSite, + metadata_type=site_search_engine_service.UpdateTargetSiteMetadata, + ) + + # Done; return the response. + return response + + def delete_target_site( + self, + request: Optional[ + Union[site_search_engine_service.DeleteTargetSiteRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_delete_target_site(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.DeleteTargetSiteRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_target_site(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.DeleteTargetSiteRequest, dict]): + The request object. Request message for + [SiteSearchEngineService.DeleteTargetSite][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.DeleteTargetSite] + method. + name (str): + Required. 
Full resource name of + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine/targetSites/{target_site}``. + + If the caller does not have permission to access the + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite], + regardless of whether or not it exists, a + PERMISSION_DENIED error is returned. + + If the requested + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite] + does not exist, a NOT_FOUND error is returned. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a site_search_engine_service.DeleteTargetSiteRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, site_search_engine_service.DeleteTargetSiteRequest): + request = site_search_engine_service.DeleteTargetSiteRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_target_site] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=site_search_engine_service.DeleteTargetSiteMetadata, + ) + + # Done; return the response. + return response + + def list_target_sites( + self, + request: Optional[ + Union[site_search_engine_service.ListTargetSitesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTargetSitesPager: + r"""Gets a list of + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite]s. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_list_target_sites(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.ListTargetSitesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_target_sites(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.ListTargetSitesRequest, dict]): + The request object. Request message for + [SiteSearchEngineService.ListTargetSites][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.ListTargetSites] + method. + parent (str): + Required. The parent site search engine resource name, + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine``. + + If the caller does not have permission to list + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite]s + under this site search engine, regardless of whether or + not this branch exists, a PERMISSION_DENIED error is + returned. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.services.site_search_engine_service.pagers.ListTargetSitesPager: + Response message for + [SiteSearchEngineService.ListTargetSites][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.ListTargetSites] + method. 
+ + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a site_search_engine_service.ListTargetSitesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, site_search_engine_service.ListTargetSitesRequest): + request = site_search_engine_service.ListTargetSitesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_target_sites] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTargetSitesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def enable_advanced_site_search( + self, + request: Optional[ + Union[site_search_engine_service.EnableAdvancedSiteSearchRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Upgrade from basic site search to advanced site + search. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_enable_advanced_site_search(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.EnableAdvancedSiteSearchRequest( + site_search_engine="site_search_engine_value", + ) + + # Make the request + operation = client.enable_advanced_site_search(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.EnableAdvancedSiteSearchRequest, dict]): + The request object. Request message for + [SiteSearchEngineService.EnableAdvancedSiteSearch][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.EnableAdvancedSiteSearch] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1beta.types.EnableAdvancedSiteSearchResponse` Response message for + [SiteSearchEngineService.EnableAdvancedSiteSearch][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.EnableAdvancedSiteSearch] + method. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a site_search_engine_service.EnableAdvancedSiteSearchRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, site_search_engine_service.EnableAdvancedSiteSearchRequest + ): + request = site_search_engine_service.EnableAdvancedSiteSearchRequest( + request + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.enable_advanced_site_search + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("site_search_engine", request.site_search_engine),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + site_search_engine_service.EnableAdvancedSiteSearchResponse, + metadata_type=site_search_engine_service.EnableAdvancedSiteSearchMetadata, + ) + + # Done; return the response. 
+ return response + + def disable_advanced_site_search( + self, + request: Optional[ + Union[site_search_engine_service.DisableAdvancedSiteSearchRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Downgrade from advanced site search to basic site + search. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_disable_advanced_site_search(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.DisableAdvancedSiteSearchRequest( + site_search_engine="site_search_engine_value", + ) + + # Make the request + operation = client.disable_advanced_site_search(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.DisableAdvancedSiteSearchRequest, dict]): + The request object. Request message for + [SiteSearchEngineService.DisableAdvancedSiteSearch][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.DisableAdvancedSiteSearch] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1beta.types.DisableAdvancedSiteSearchResponse` Response message for + [SiteSearchEngineService.DisableAdvancedSiteSearch][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.DisableAdvancedSiteSearch] + method. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a site_search_engine_service.DisableAdvancedSiteSearchRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, site_search_engine_service.DisableAdvancedSiteSearchRequest + ): + request = site_search_engine_service.DisableAdvancedSiteSearchRequest( + request + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.disable_advanced_site_search + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("site_search_engine", request.site_search_engine),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + site_search_engine_service.DisableAdvancedSiteSearchResponse, + metadata_type=site_search_engine_service.DisableAdvancedSiteSearchMetadata, + ) + + # Done; return the response. 
+ return response + + def recrawl_uris( + self, + request: Optional[ + Union[site_search_engine_service.RecrawlUrisRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Request on-demand recrawl for a list of URIs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_recrawl_uris(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.RecrawlUrisRequest( + site_search_engine="site_search_engine_value", + uris=['uris_value1', 'uris_value2'], + ) + + # Make the request + operation = client.recrawl_uris(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.RecrawlUrisRequest, dict]): + The request object. Request message for + [SiteSearchEngineService.RecrawlUris][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.RecrawlUris] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.discoveryengine_v1beta.types.RecrawlUrisResponse` Response message for + [SiteSearchEngineService.RecrawlUris][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.RecrawlUris] + method. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a site_search_engine_service.RecrawlUrisRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, site_search_engine_service.RecrawlUrisRequest): + request = site_search_engine_service.RecrawlUrisRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.recrawl_uris] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("site_search_engine", request.site_search_engine),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + site_search_engine_service.RecrawlUrisResponse, + metadata_type=site_search_engine_service.RecrawlUrisMetadata, + ) + + # Done; return the response. + return response + + def batch_verify_target_sites( + self, + request: Optional[ + Union[site_search_engine_service.BatchVerifyTargetSitesRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Verify target sites' ownership and validity. 
+ This API sends all the target sites under site search + engine for verification. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_batch_verify_target_sites(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.BatchVerifyTargetSitesRequest( + parent="parent_value", + ) + + # Make the request + operation = client.batch_verify_target_sites(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.BatchVerifyTargetSitesRequest, dict]): + The request object. Request message for + [SiteSearchEngineService.BatchVerifyTargetSites][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.BatchVerifyTargetSites] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1beta.types.BatchVerifyTargetSitesResponse` Response message for + [SiteSearchEngineService.BatchVerifyTargetSites][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.BatchVerifyTargetSites] + method. 
+ + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a site_search_engine_service.BatchVerifyTargetSitesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, site_search_engine_service.BatchVerifyTargetSitesRequest + ): + request = site_search_engine_service.BatchVerifyTargetSitesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.batch_verify_target_sites + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + site_search_engine_service.BatchVerifyTargetSitesResponse, + metadata_type=site_search_engine_service.BatchVerifyTargetSitesMetadata, + ) + + # Done; return the response. + return response + + def fetch_domain_verification_status( + self, + request: Optional[ + Union[site_search_engine_service.FetchDomainVerificationStatusRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.FetchDomainVerificationStatusPager: + r"""Returns list of target sites with its domain verification + status. This method can only be called under data store with + BASIC_SITE_SEARCH state at the moment. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_fetch_domain_verification_status(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.FetchDomainVerificationStatusRequest( + site_search_engine="site_search_engine_value", + ) + + # Make the request + page_result = client.fetch_domain_verification_status(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.FetchDomainVerificationStatusRequest, dict]): + The request object. Request message for + [SiteSearchEngineService.FetchDomainVerificationStatus][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.FetchDomainVerificationStatus] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.services.site_search_engine_service.pagers.FetchDomainVerificationStatusPager: + Response message for + [SiteSearchEngineService.FetchDomainVerificationStatus][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.FetchDomainVerificationStatus] + method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Minor optimization to avoid making a copy if the user passes + # in a site_search_engine_service.FetchDomainVerificationStatusRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, site_search_engine_service.FetchDomainVerificationStatusRequest + ): + request = site_search_engine_service.FetchDomainVerificationStatusRequest( + request + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.fetch_domain_verification_status + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("site_search_engine", request.site_search_engine),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.FetchDomainVerificationStatusPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "SiteSearchEngineServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
# -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from typing import (
    Any,
    AsyncIterator,
    Awaitable,
    Callable,
    Iterator,
    Optional,
    Sequence,
    Tuple,
)

from google.cloud.discoveryengine_v1beta.types import (
    site_search_engine,
    site_search_engine_service,
)


class ListTargetSitesPager:
    """A pager for iterating through ``list_target_sites`` requests.

    Wraps an initial
    :class:`google.cloud.discoveryengine_v1beta.types.ListTargetSitesResponse`
    and exposes ``__iter__`` over the ``target_sites`` field, issuing further
    ``ListTargetSites`` requests behind the scenes whenever a
    ``next_page_token`` indicates more results.

    All attributes of the most recently fetched
    :class:`google.cloud.discoveryengine_v1beta.types.ListTargetSitesResponse`
    are also available on the pager itself.
    """

    def __init__(
        self,
        method: Callable[..., site_search_engine_service.ListTargetSitesResponse],
        request: site_search_engine_service.ListTargetSitesRequest,
        response: site_search_engine_service.ListTargetSitesResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The RPC method that produced ``response`` and
                will be re-invoked for subsequent pages.
            request (google.cloud.discoveryengine_v1beta.types.ListTargetSitesRequest):
                The initial request object.
            response (google.cloud.discoveryengine_v1beta.types.ListTargetSitesResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        self._request = site_search_engine_service.ListTargetSitesRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Forward unknown attribute lookups to the latest response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[site_search_engine_service.ListTargetSitesResponse]:
        page = self._response
        while True:
            yield page
            if not page.next_page_token:
                return
            self._request.page_token = page.next_page_token
            page = self._method(self._request, metadata=self._metadata)
            self._response = page

    def __iter__(self) -> Iterator[site_search_engine.TargetSite]:
        for page in self.pages:
            for target_site in page.target_sites:
                yield target_site

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"


class ListTargetSitesAsyncPager:
    """A pager for iterating through ``list_target_sites`` requests.

    Wraps an initial
    :class:`google.cloud.discoveryengine_v1beta.types.ListTargetSitesResponse`
    and exposes ``__aiter__`` over the ``target_sites`` field, awaiting further
    ``ListTargetSites`` requests behind the scenes whenever a
    ``next_page_token`` indicates more results.

    All attributes of the most recently fetched
    :class:`google.cloud.discoveryengine_v1beta.types.ListTargetSitesResponse`
    are also available on the pager itself.
    """

    def __init__(
        self,
        method: Callable[
            ..., Awaitable[site_search_engine_service.ListTargetSitesResponse]
        ],
        request: site_search_engine_service.ListTargetSitesRequest,
        response: site_search_engine_service.ListTargetSitesResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiates the pager.

        Args:
            method (Callable): The awaitable RPC method that produced
                ``response`` and will be re-invoked for subsequent pages.
            request (google.cloud.discoveryengine_v1beta.types.ListTargetSitesRequest):
                The initial request object.
            response (google.cloud.discoveryengine_v1beta.types.ListTargetSitesResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        self._request = site_search_engine_service.ListTargetSitesRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Forward unknown attribute lookups to the latest response.
        return getattr(self._response, name)

    @property
    async def pages(
        self,
    ) -> AsyncIterator[site_search_engine_service.ListTargetSitesResponse]:
        page = self._response
        while True:
            yield page
            if not page.next_page_token:
                return
            self._request.page_token = page.next_page_token
            page = await self._method(self._request, metadata=self._metadata)
            self._response = page

    def __aiter__(self) -> AsyncIterator[site_search_engine.TargetSite]:
        async def _flatten():
            async for page in self.pages:
                for target_site in page.target_sites:
                    yield target_site

        return _flatten()

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"


class FetchDomainVerificationStatusPager:
    """A pager for iterating through ``fetch_domain_verification_status`` requests.

    Wraps an initial
    :class:`google.cloud.discoveryengine_v1beta.types.FetchDomainVerificationStatusResponse`
    and exposes ``__iter__`` over the ``target_sites`` field, issuing further
    ``FetchDomainVerificationStatus`` requests behind the scenes whenever a
    ``next_page_token`` indicates more results.

    All attributes of the most recently fetched
    :class:`google.cloud.discoveryengine_v1beta.types.FetchDomainVerificationStatusResponse`
    are also available on the pager itself.
    """

    def __init__(
        self,
        method: Callable[
            ..., site_search_engine_service.FetchDomainVerificationStatusResponse
        ],
        request: site_search_engine_service.FetchDomainVerificationStatusRequest,
        response: site_search_engine_service.FetchDomainVerificationStatusResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The RPC method that produced ``response`` and
                will be re-invoked for subsequent pages.
            request (google.cloud.discoveryengine_v1beta.types.FetchDomainVerificationStatusRequest):
                The initial request object.
            response (google.cloud.discoveryengine_v1beta.types.FetchDomainVerificationStatusResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        self._request = site_search_engine_service.FetchDomainVerificationStatusRequest(
            request
        )
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Forward unknown attribute lookups to the latest response.
        return getattr(self._response, name)

    @property
    def pages(
        self,
    ) -> Iterator[site_search_engine_service.FetchDomainVerificationStatusResponse]:
        page = self._response
        while True:
            yield page
            if not page.next_page_token:
                return
            self._request.page_token = page.next_page_token
            page = self._method(self._request, metadata=self._metadata)
            self._response = page

    def __iter__(self) -> Iterator[site_search_engine.TargetSite]:
        for page in self.pages:
            for target_site in page.target_sites:
                yield target_site

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"


class FetchDomainVerificationStatusAsyncPager:
    """A pager for iterating through ``fetch_domain_verification_status`` requests.

    Wraps an initial
    :class:`google.cloud.discoveryengine_v1beta.types.FetchDomainVerificationStatusResponse`
    and exposes ``__aiter__`` over the ``target_sites`` field, awaiting further
    ``FetchDomainVerificationStatus`` requests behind the scenes whenever a
    ``next_page_token`` indicates more results.

    All attributes of the most recently fetched
    :class:`google.cloud.discoveryengine_v1beta.types.FetchDomainVerificationStatusResponse`
    are also available on the pager itself.
    """

    def __init__(
        self,
        method: Callable[
            ...,
            Awaitable[site_search_engine_service.FetchDomainVerificationStatusResponse],
        ],
        request: site_search_engine_service.FetchDomainVerificationStatusRequest,
        response: site_search_engine_service.FetchDomainVerificationStatusResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiates the pager.

        Args:
            method (Callable): The awaitable RPC method that produced
                ``response`` and will be re-invoked for subsequent pages.
            request (google.cloud.discoveryengine_v1beta.types.FetchDomainVerificationStatusRequest):
                The initial request object.
            response (google.cloud.discoveryengine_v1beta.types.FetchDomainVerificationStatusResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        self._request = site_search_engine_service.FetchDomainVerificationStatusRequest(
            request
        )
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Forward unknown attribute lookups to the latest response.
        return getattr(self._response, name)

    @property
    async def pages(
        self,
    ) -> AsyncIterator[
        site_search_engine_service.FetchDomainVerificationStatusResponse
    ]:
        page = self._response
        while True:
            yield page
            if not page.next_page_token:
                return
            self._request.page_token = page.next_page_token
            page = await self._method(self._request, metadata=self._metadata)
            self._response = page

    def __aiter__(self) -> AsyncIterator[site_search_engine.TargetSite]:
        async def _flatten():
            async for page in self.pages:
                for target_site in page.target_sites:
                    yield target_site

        return _flatten()

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"
# -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from typing import Dict, Type

from .base import SiteSearchEngineServiceTransport
from .grpc import SiteSearchEngineServiceGrpcTransport
from .grpc_asyncio import SiteSearchEngineServiceGrpcAsyncIOTransport
from .rest import (
    SiteSearchEngineServiceRestInterceptor,
    SiteSearchEngineServiceRestTransport,
)

# Registry mapping each transport name to its implementation class.
_transport_registry = OrderedDict(
    (
        ("grpc", SiteSearchEngineServiceGrpcTransport),
        ("grpc_asyncio", SiteSearchEngineServiceGrpcAsyncIOTransport),
        ("rest", SiteSearchEngineServiceRestTransport),
    )
)  # type: Dict[str, Type[SiteSearchEngineServiceTransport]]

__all__ = (
    "SiteSearchEngineServiceTransport",
    "SiteSearchEngineServiceGrpcTransport",
    "SiteSearchEngineServiceGrpcAsyncIOTransport",
    "SiteSearchEngineServiceRestTransport",
    "SiteSearchEngineServiceRestInterceptor",
)
# -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union

import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1, operations_v1
from google.api_core import retry as retries
import google.auth  # type: ignore
from google.auth import credentials as ga_credentials  # type: ignore
from google.cloud.location import locations_pb2  # type: ignore
from google.longrunning import operations_pb2  # type: ignore
from google.oauth2 import service_account  # type: ignore

from google.cloud.discoveryengine_v1beta import gapic_version as package_version
from google.cloud.discoveryengine_v1beta.types import (
    site_search_engine,
    site_search_engine_service,
)

DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
    gapic_version=package_version.__version__
)


class SiteSearchEngineServiceTransport(abc.ABC):
    """Abstract transport class for SiteSearchEngineService.

    Concrete subclasses (gRPC, gRPC asyncio, REST) implement the RPC
    properties declared below; this base class handles credential
    resolution and per-method retry/timeout wrapping.
    """

    AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)

    DEFAULT_HOST: str = "discoveryengine.googleapis.com"

    def __init__(
        self,
        *,
        host: str = DEFAULT_HOST,
        credentials: Optional[ga_credentials.Credentials] = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        quota_project_id: Optional[str] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
        always_use_jwt_access: Optional[bool] = False,
        api_audience: Optional[str] = None,
        **kwargs,
    ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                The hostname to connect to (default: 'discoveryengine.googleapis.com').
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
            api_audience (Optional[str]): Audience applied to GDC-H credentials;
                defaults to ``host`` when unset.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both
                ``credentials`` and ``credentials_file`` are passed.
        """

        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}

        # Save the scopes.
        self._scopes = scopes

        # If no credentials are provided, then determine the appropriate
        # defaults.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs(
                "'credentials_file' and 'credentials' are mutually exclusive"
            )

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
            )
        elif credentials is None:
            credentials, _ = google.auth.default(
                **scopes_kwargs, quota_project_id=quota_project_id
            )
            # Don't apply audience if the credentials file passed from user.
            if hasattr(credentials, "with_gdch_audience"):
                credentials = credentials.with_gdch_audience(
                    api_audience if api_audience else host
                )

        # If the credentials are service account credentials, then always try to use self signed JWT.
        if (
            always_use_jwt_access
            and isinstance(credentials, service_account.Credentials)
            and hasattr(service_account.Credentials, "with_always_use_jwt_access")
        ):
            credentials = credentials.with_always_use_jwt_access(True)

        # Save the credentials.
        self._credentials = credentials

        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ":" not in host:
            host += ":443"
        self._host = host

    @property
    def host(self):
        # Hostname including port (":443" appended by __init__ when absent).
        return self._host

    def _prep_wrapped_messages(self, client_info):
        # Precompute the wrapped methods.
        # Each entry wraps the raw transport stub with retry/timeout handling
        # and user-agent metadata; timeouts here are None (no client default).
        self._wrapped_methods = {
            self.get_site_search_engine: gapic_v1.method.wrap_method(
                self.get_site_search_engine,
                default_timeout=None,
                client_info=client_info,
            ),
            self.create_target_site: gapic_v1.method.wrap_method(
                self.create_target_site,
                default_timeout=None,
                client_info=client_info,
            ),
            self.batch_create_target_sites: gapic_v1.method.wrap_method(
                self.batch_create_target_sites,
                default_timeout=None,
                client_info=client_info,
            ),
            self.get_target_site: gapic_v1.method.wrap_method(
                self.get_target_site,
                default_timeout=None,
                client_info=client_info,
            ),
            self.update_target_site: gapic_v1.method.wrap_method(
                self.update_target_site,
                default_timeout=None,
                client_info=client_info,
            ),
            self.delete_target_site: gapic_v1.method.wrap_method(
                self.delete_target_site,
                default_timeout=None,
                client_info=client_info,
            ),
            self.list_target_sites: gapic_v1.method.wrap_method(
                self.list_target_sites,
                default_timeout=None,
                client_info=client_info,
            ),
            self.enable_advanced_site_search: gapic_v1.method.wrap_method(
                self.enable_advanced_site_search,
                default_timeout=None,
                client_info=client_info,
            ),
            self.disable_advanced_site_search: gapic_v1.method.wrap_method(
                self.disable_advanced_site_search,
                default_timeout=None,
                client_info=client_info,
            ),
            self.recrawl_uris: gapic_v1.method.wrap_method(
                self.recrawl_uris,
                default_timeout=None,
                client_info=client_info,
            ),
            self.batch_verify_target_sites: gapic_v1.method.wrap_method(
                self.batch_verify_target_sites,
                default_timeout=None,
                client_info=client_info,
            ),
            self.fetch_domain_verification_status: gapic_v1.method.wrap_method(
                self.fetch_domain_verification_status,
                default_timeout=None,
                client_info=client_info,
            ),
        }

    def close(self):
        """Closes resources associated with the transport.

        .. warning::
             Only call this method if the transport is NOT shared
             with other clients - this may cause errors in other clients!
        """
        raise NotImplementedError()

    @property
    def operations_client(self):
        """Return the client designed to process long-running operations."""
        raise NotImplementedError()

    @property
    def get_site_search_engine(
        self,
    ) -> Callable[
        [site_search_engine_service.GetSiteSearchEngineRequest],
        Union[
            site_search_engine.SiteSearchEngine,
            Awaitable[site_search_engine.SiteSearchEngine],
        ],
    ]:
        raise NotImplementedError()

    @property
    def create_target_site(
        self,
    ) -> Callable[
        [site_search_engine_service.CreateTargetSiteRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()

    @property
    def batch_create_target_sites(
        self,
    ) -> Callable[
        [site_search_engine_service.BatchCreateTargetSitesRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()

    @property
    def get_target_site(
        self,
    ) -> Callable[
        [site_search_engine_service.GetTargetSiteRequest],
        Union[site_search_engine.TargetSite, Awaitable[site_search_engine.TargetSite]],
    ]:
        raise NotImplementedError()

    @property
    def update_target_site(
        self,
    ) -> Callable[
        [site_search_engine_service.UpdateTargetSiteRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()

    @property
    def delete_target_site(
        self,
    ) -> Callable[
        [site_search_engine_service.DeleteTargetSiteRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()

    @property
    def list_target_sites(
        self,
    ) -> Callable[
        [site_search_engine_service.ListTargetSitesRequest],
        Union[
            site_search_engine_service.ListTargetSitesResponse,
            Awaitable[site_search_engine_service.ListTargetSitesResponse],
        ],
    ]:
        raise NotImplementedError()

    @property
    def enable_advanced_site_search(
        self,
    ) -> Callable[
        [site_search_engine_service.EnableAdvancedSiteSearchRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()

    @property
    def disable_advanced_site_search(
        self,
    ) -> Callable[
        [site_search_engine_service.DisableAdvancedSiteSearchRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()

    @property
    def recrawl_uris(
        self,
    ) -> Callable[
        [site_search_engine_service.RecrawlUrisRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()

    @property
    def batch_verify_target_sites(
        self,
    ) -> Callable[
        [site_search_engine_service.BatchVerifyTargetSitesRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()

    @property
    def fetch_domain_verification_status(
        self,
    ) -> Callable[
        [site_search_engine_service.FetchDomainVerificationStatusRequest],
        Union[
            site_search_engine_service.FetchDomainVerificationStatusResponse,
            Awaitable[site_search_engine_service.FetchDomainVerificationStatusResponse],
        ],
    ]:
        raise NotImplementedError()

    @property
    def list_operations(
        self,
    ) -> Callable[
        [operations_pb2.ListOperationsRequest],
        Union[
            operations_pb2.ListOperationsResponse,
            Awaitable[operations_pb2.ListOperationsResponse],
        ],
    ]:
        raise NotImplementedError()

    @property
    def get_operation(
        self,
    ) -> Callable[
        [operations_pb2.GetOperationRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()

    @property
    def kind(self) -> str:
        # Short transport identifier, e.g. "grpc" or "rest" — provided by subclasses.
        raise NotImplementedError()


__all__ = ("SiteSearchEngineServiceTransport",)
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.discoveryengine_v1beta.types import ( + site_search_engine, + site_search_engine_service, +) + +from .base import DEFAULT_CLIENT_INFO, SiteSearchEngineServiceTransport + + +class SiteSearchEngineServiceGrpcTransport(SiteSearchEngineServiceTransport): + """gRPC backend transport for SiteSearchEngineService. + + Service for managing site search related resources. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. 
If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. 
+ return self._operations_client + + @property + def get_site_search_engine( + self, + ) -> Callable[ + [site_search_engine_service.GetSiteSearchEngineRequest], + site_search_engine.SiteSearchEngine, + ]: + r"""Return a callable for the get site search engine method over gRPC. + + Gets the + [SiteSearchEngine][google.cloud.discoveryengine.v1beta.SiteSearchEngine]. + + Returns: + Callable[[~.GetSiteSearchEngineRequest], + ~.SiteSearchEngine]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_site_search_engine" not in self._stubs: + self._stubs["get_site_search_engine"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SiteSearchEngineService/GetSiteSearchEngine", + request_serializer=site_search_engine_service.GetSiteSearchEngineRequest.serialize, + response_deserializer=site_search_engine.SiteSearchEngine.deserialize, + ) + return self._stubs["get_site_search_engine"] + + @property + def create_target_site( + self, + ) -> Callable[ + [site_search_engine_service.CreateTargetSiteRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create target site method over gRPC. + + Creates a + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite]. + + Returns: + Callable[[~.CreateTargetSiteRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_target_site" not in self._stubs: + self._stubs["create_target_site"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SiteSearchEngineService/CreateTargetSite", + request_serializer=site_search_engine_service.CreateTargetSiteRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_target_site"] + + @property + def batch_create_target_sites( + self, + ) -> Callable[ + [site_search_engine_service.BatchCreateTargetSitesRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the batch create target sites method over gRPC. + + Creates + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite] in + a batch. + + Returns: + Callable[[~.BatchCreateTargetSitesRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_create_target_sites" not in self._stubs: + self._stubs["batch_create_target_sites"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SiteSearchEngineService/BatchCreateTargetSites", + request_serializer=site_search_engine_service.BatchCreateTargetSitesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["batch_create_target_sites"] + + @property + def get_target_site( + self, + ) -> Callable[ + [site_search_engine_service.GetTargetSiteRequest], site_search_engine.TargetSite + ]: + r"""Return a callable for the get target site method over gRPC. + + Gets a + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite]. + + Returns: + Callable[[~.GetTargetSiteRequest], + ~.TargetSite]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_target_site" not in self._stubs: + self._stubs["get_target_site"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SiteSearchEngineService/GetTargetSite", + request_serializer=site_search_engine_service.GetTargetSiteRequest.serialize, + response_deserializer=site_search_engine.TargetSite.deserialize, + ) + return self._stubs["get_target_site"] + + @property + def update_target_site( + self, + ) -> Callable[ + [site_search_engine_service.UpdateTargetSiteRequest], operations_pb2.Operation + ]: + r"""Return a callable for the update target site method over gRPC. + + Updates a + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite]. + + Returns: + Callable[[~.UpdateTargetSiteRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_target_site" not in self._stubs: + self._stubs["update_target_site"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SiteSearchEngineService/UpdateTargetSite", + request_serializer=site_search_engine_service.UpdateTargetSiteRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_target_site"] + + @property + def delete_target_site( + self, + ) -> Callable[ + [site_search_engine_service.DeleteTargetSiteRequest], operations_pb2.Operation + ]: + r"""Return a callable for the delete target site method over gRPC. + + Deletes a + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite]. 
+ + Returns: + Callable[[~.DeleteTargetSiteRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_target_site" not in self._stubs: + self._stubs["delete_target_site"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SiteSearchEngineService/DeleteTargetSite", + request_serializer=site_search_engine_service.DeleteTargetSiteRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_target_site"] + + @property + def list_target_sites( + self, + ) -> Callable[ + [site_search_engine_service.ListTargetSitesRequest], + site_search_engine_service.ListTargetSitesResponse, + ]: + r"""Return a callable for the list target sites method over gRPC. + + Gets a list of + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite]s. + + Returns: + Callable[[~.ListTargetSitesRequest], + ~.ListTargetSitesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_target_sites" not in self._stubs: + self._stubs["list_target_sites"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SiteSearchEngineService/ListTargetSites", + request_serializer=site_search_engine_service.ListTargetSitesRequest.serialize, + response_deserializer=site_search_engine_service.ListTargetSitesResponse.deserialize, + ) + return self._stubs["list_target_sites"] + + @property + def enable_advanced_site_search( + self, + ) -> Callable[ + [site_search_engine_service.EnableAdvancedSiteSearchRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the enable advanced site search method over gRPC. + + Upgrade from basic site search to advanced site + search. + + Returns: + Callable[[~.EnableAdvancedSiteSearchRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "enable_advanced_site_search" not in self._stubs: + self._stubs["enable_advanced_site_search"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SiteSearchEngineService/EnableAdvancedSiteSearch", + request_serializer=site_search_engine_service.EnableAdvancedSiteSearchRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["enable_advanced_site_search"] + + @property + def disable_advanced_site_search( + self, + ) -> Callable[ + [site_search_engine_service.DisableAdvancedSiteSearchRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the disable advanced site search method over gRPC. + + Downgrade from advanced site search to basic site + search. + + Returns: + Callable[[~.DisableAdvancedSiteSearchRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "disable_advanced_site_search" not in self._stubs: + self._stubs["disable_advanced_site_search"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SiteSearchEngineService/DisableAdvancedSiteSearch", + request_serializer=site_search_engine_service.DisableAdvancedSiteSearchRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["disable_advanced_site_search"] + + @property + def recrawl_uris( + self, + ) -> Callable[ + [site_search_engine_service.RecrawlUrisRequest], operations_pb2.Operation + ]: + r"""Return a callable for the recrawl uris method over gRPC. + + Request on-demand recrawl for a list of URIs. + + Returns: + Callable[[~.RecrawlUrisRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "recrawl_uris" not in self._stubs: + self._stubs["recrawl_uris"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SiteSearchEngineService/RecrawlUris", + request_serializer=site_search_engine_service.RecrawlUrisRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["recrawl_uris"] + + @property + def batch_verify_target_sites( + self, + ) -> Callable[ + [site_search_engine_service.BatchVerifyTargetSitesRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the batch verify target sites method over gRPC. + + Verify target sites' ownership and validity. + This API sends all the target sites under site search + engine for verification. 
+ + Returns: + Callable[[~.BatchVerifyTargetSitesRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_verify_target_sites" not in self._stubs: + self._stubs["batch_verify_target_sites"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SiteSearchEngineService/BatchVerifyTargetSites", + request_serializer=site_search_engine_service.BatchVerifyTargetSitesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["batch_verify_target_sites"] + + @property + def fetch_domain_verification_status( + self, + ) -> Callable[ + [site_search_engine_service.FetchDomainVerificationStatusRequest], + site_search_engine_service.FetchDomainVerificationStatusResponse, + ]: + r"""Return a callable for the fetch domain verification + status method over gRPC. + + Returns list of target sites with its domain verification + status. This method can only be called under data store with + BASIC_SITE_SEARCH state at the moment. + + Returns: + Callable[[~.FetchDomainVerificationStatusRequest], + ~.FetchDomainVerificationStatusResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "fetch_domain_verification_status" not in self._stubs: + self._stubs[ + "fetch_domain_verification_status" + ] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SiteSearchEngineService/FetchDomainVerificationStatus", + request_serializer=site_search_engine_service.FetchDomainVerificationStatusRequest.serialize, + response_deserializer=site_search_engine_service.FetchDomainVerificationStatusResponse.deserialize, + ) + return self._stubs["fetch_domain_verification_status"] + + def close(self): + self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("SiteSearchEngineServiceGrpcTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..a21f2da94b01 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/transports/grpc_asyncio.py @@ -0,0 +1,661 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.discoveryengine_v1beta.types import ( + site_search_engine, + site_search_engine_service, +) + +from .base import DEFAULT_CLIENT_INFO, SiteSearchEngineServiceTransport +from .grpc import SiteSearchEngineServiceGrpcTransport + + +class SiteSearchEngineServiceGrpcAsyncIOTransport(SiteSearchEngineServiceTransport): + """gRPC AsyncIO backend transport for SiteSearchEngineService. + + Service for managing site search related resources. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. 
+ """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def get_site_search_engine( + self, + ) -> Callable[ + [site_search_engine_service.GetSiteSearchEngineRequest], + Awaitable[site_search_engine.SiteSearchEngine], + ]: + r"""Return a callable for the get site search engine method over gRPC. + + Gets the + [SiteSearchEngine][google.cloud.discoveryengine.v1beta.SiteSearchEngine]. + + Returns: + Callable[[~.GetSiteSearchEngineRequest], + Awaitable[~.SiteSearchEngine]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_site_search_engine" not in self._stubs: + self._stubs["get_site_search_engine"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SiteSearchEngineService/GetSiteSearchEngine", + request_serializer=site_search_engine_service.GetSiteSearchEngineRequest.serialize, + response_deserializer=site_search_engine.SiteSearchEngine.deserialize, + ) + return self._stubs["get_site_search_engine"] + + @property + def create_target_site( + self, + ) -> Callable[ + [site_search_engine_service.CreateTargetSiteRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create target site method over gRPC. + + Creates a + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite]. + + Returns: + Callable[[~.CreateTargetSiteRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_target_site" not in self._stubs: + self._stubs["create_target_site"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SiteSearchEngineService/CreateTargetSite", + request_serializer=site_search_engine_service.CreateTargetSiteRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_target_site"] + + @property + def batch_create_target_sites( + self, + ) -> Callable[ + [site_search_engine_service.BatchCreateTargetSitesRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the batch create target sites method over gRPC. + + Creates + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite] in + a batch. + + Returns: + Callable[[~.BatchCreateTargetSitesRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_create_target_sites" not in self._stubs: + self._stubs["batch_create_target_sites"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SiteSearchEngineService/BatchCreateTargetSites", + request_serializer=site_search_engine_service.BatchCreateTargetSitesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["batch_create_target_sites"] + + @property + def get_target_site( + self, + ) -> Callable[ + [site_search_engine_service.GetTargetSiteRequest], + Awaitable[site_search_engine.TargetSite], + ]: + r"""Return a callable for the get target site method over gRPC. + + Gets a + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite]. 
+ + Returns: + Callable[[~.GetTargetSiteRequest], + Awaitable[~.TargetSite]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_target_site" not in self._stubs: + self._stubs["get_target_site"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SiteSearchEngineService/GetTargetSite", + request_serializer=site_search_engine_service.GetTargetSiteRequest.serialize, + response_deserializer=site_search_engine.TargetSite.deserialize, + ) + return self._stubs["get_target_site"] + + @property + def update_target_site( + self, + ) -> Callable[ + [site_search_engine_service.UpdateTargetSiteRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update target site method over gRPC. + + Updates a + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite]. + + Returns: + Callable[[~.UpdateTargetSiteRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_target_site" not in self._stubs: + self._stubs["update_target_site"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SiteSearchEngineService/UpdateTargetSite", + request_serializer=site_search_engine_service.UpdateTargetSiteRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_target_site"] + + @property + def delete_target_site( + self, + ) -> Callable[ + [site_search_engine_service.DeleteTargetSiteRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete target site method over gRPC. + + Deletes a + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite]. + + Returns: + Callable[[~.DeleteTargetSiteRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_target_site" not in self._stubs: + self._stubs["delete_target_site"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SiteSearchEngineService/DeleteTargetSite", + request_serializer=site_search_engine_service.DeleteTargetSiteRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_target_site"] + + @property + def list_target_sites( + self, + ) -> Callable[ + [site_search_engine_service.ListTargetSitesRequest], + Awaitable[site_search_engine_service.ListTargetSitesResponse], + ]: + r"""Return a callable for the list target sites method over gRPC. + + Gets a list of + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite]s. + + Returns: + Callable[[~.ListTargetSitesRequest], + Awaitable[~.ListTargetSitesResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_target_sites" not in self._stubs: + self._stubs["list_target_sites"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SiteSearchEngineService/ListTargetSites", + request_serializer=site_search_engine_service.ListTargetSitesRequest.serialize, + response_deserializer=site_search_engine_service.ListTargetSitesResponse.deserialize, + ) + return self._stubs["list_target_sites"] + + @property + def enable_advanced_site_search( + self, + ) -> Callable[ + [site_search_engine_service.EnableAdvancedSiteSearchRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the enable advanced site search method over gRPC. + + Upgrade from basic site search to advanced site + search. + + Returns: + Callable[[~.EnableAdvancedSiteSearchRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "enable_advanced_site_search" not in self._stubs: + self._stubs["enable_advanced_site_search"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SiteSearchEngineService/EnableAdvancedSiteSearch", + request_serializer=site_search_engine_service.EnableAdvancedSiteSearchRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["enable_advanced_site_search"] + + @property + def disable_advanced_site_search( + self, + ) -> Callable[ + [site_search_engine_service.DisableAdvancedSiteSearchRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the disable advanced site search method over gRPC. 
+ + Downgrade from advanced site search to basic site + search. + + Returns: + Callable[[~.DisableAdvancedSiteSearchRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "disable_advanced_site_search" not in self._stubs: + self._stubs["disable_advanced_site_search"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SiteSearchEngineService/DisableAdvancedSiteSearch", + request_serializer=site_search_engine_service.DisableAdvancedSiteSearchRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["disable_advanced_site_search"] + + @property + def recrawl_uris( + self, + ) -> Callable[ + [site_search_engine_service.RecrawlUrisRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the recrawl uris method over gRPC. + + Request on-demand recrawl for a list of URIs. + + Returns: + Callable[[~.RecrawlUrisRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "recrawl_uris" not in self._stubs: + self._stubs["recrawl_uris"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SiteSearchEngineService/RecrawlUris", + request_serializer=site_search_engine_service.RecrawlUrisRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["recrawl_uris"] + + @property + def batch_verify_target_sites( + self, + ) -> Callable[ + [site_search_engine_service.BatchVerifyTargetSitesRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the batch verify target sites method over gRPC. + + Verify target sites' ownership and validity. + This API sends all the target sites under site search + engine for verification. + + Returns: + Callable[[~.BatchVerifyTargetSitesRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_verify_target_sites" not in self._stubs: + self._stubs["batch_verify_target_sites"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SiteSearchEngineService/BatchVerifyTargetSites", + request_serializer=site_search_engine_service.BatchVerifyTargetSitesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["batch_verify_target_sites"] + + @property + def fetch_domain_verification_status( + self, + ) -> Callable[ + [site_search_engine_service.FetchDomainVerificationStatusRequest], + Awaitable[site_search_engine_service.FetchDomainVerificationStatusResponse], + ]: + r"""Return a callable for the fetch domain verification + status method over gRPC. + + Returns list of target sites with its domain verification + status. 
This method can only be called under data store with + BASIC_SITE_SEARCH state at the moment. + + Returns: + Callable[[~.FetchDomainVerificationStatusRequest], + Awaitable[~.FetchDomainVerificationStatusResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "fetch_domain_verification_status" not in self._stubs: + self._stubs[ + "fetch_domain_verification_status" + ] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.SiteSearchEngineService/FetchDomainVerificationStatus", + request_serializer=site_search_engine_service.FetchDomainVerificationStatusRequest.serialize, + response_deserializer=site_search_engine_service.FetchDomainVerificationStatusResponse.deserialize, + ) + return self._stubs["fetch_domain_verification_status"] + + def close(self): + return self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("SiteSearchEngineServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/transports/rest.py new file mode 100644 index 000000000000..264fb6a7ef80 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/transports/rest.py @@ -0,0 +1,2370 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.discoveryengine_v1beta.types import ( + site_search_engine, + site_search_engine_service, +) + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import SiteSearchEngineServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class SiteSearchEngineServiceRestInterceptor: + """Interceptor for SiteSearchEngineService. 
+ + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the SiteSearchEngineServiceRestTransport. + + .. code-block:: python + class MyCustomSiteSearchEngineServiceInterceptor(SiteSearchEngineServiceRestInterceptor): + def pre_batch_create_target_sites(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_create_target_sites(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_batch_verify_target_sites(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_verify_target_sites(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_target_site(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_target_site(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_target_site(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_target_site(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_disable_advanced_site_search(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_disable_advanced_site_search(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_enable_advanced_site_search(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_enable_advanced_site_search(self, response): 
+ logging.log(f"Received response: {response}") + return response + + def pre_fetch_domain_verification_status(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_fetch_domain_verification_status(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_site_search_engine(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_site_search_engine(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_target_site(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_target_site(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_target_sites(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_target_sites(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_recrawl_uris(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_recrawl_uris(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_target_site(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_target_site(self, response): + logging.log(f"Received response: {response}") + return response + + transport = SiteSearchEngineServiceRestTransport(interceptor=MyCustomSiteSearchEngineServiceInterceptor()) + client = SiteSearchEngineServiceClient(transport=transport) + + + """ + + def pre_batch_create_target_sites( + self, + request: site_search_engine_service.BatchCreateTargetSitesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + site_search_engine_service.BatchCreateTargetSitesRequest, + Sequence[Tuple[str, str]], + ]: + 
"""Pre-rpc interceptor for batch_create_target_sites + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. + """ + return request, metadata + + def post_batch_create_target_sites( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for batch_create_target_sites + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. + """ + return response + + def pre_batch_verify_target_sites( + self, + request: site_search_engine_service.BatchVerifyTargetSitesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + site_search_engine_service.BatchVerifyTargetSitesRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for batch_verify_target_sites + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. + """ + return request, metadata + + def post_batch_verify_target_sites( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for batch_verify_target_sites + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. + """ + return response + + def pre_create_target_site( + self, + request: site_search_engine_service.CreateTargetSiteRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + site_search_engine_service.CreateTargetSiteRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for create_target_site + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. 
+ """ + return request, metadata + + def post_create_target_site( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_target_site + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. + """ + return response + + def pre_delete_target_site( + self, + request: site_search_engine_service.DeleteTargetSiteRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + site_search_engine_service.DeleteTargetSiteRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for delete_target_site + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. + """ + return request, metadata + + def post_delete_target_site( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_target_site + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. + """ + return response + + def pre_disable_advanced_site_search( + self, + request: site_search_engine_service.DisableAdvancedSiteSearchRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + site_search_engine_service.DisableAdvancedSiteSearchRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for disable_advanced_site_search + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. + """ + return request, metadata + + def post_disable_advanced_site_search( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for disable_advanced_site_search + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. 
+ """ + return response + + def pre_enable_advanced_site_search( + self, + request: site_search_engine_service.EnableAdvancedSiteSearchRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + site_search_engine_service.EnableAdvancedSiteSearchRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for enable_advanced_site_search + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. + """ + return request, metadata + + def post_enable_advanced_site_search( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for enable_advanced_site_search + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. + """ + return response + + def pre_fetch_domain_verification_status( + self, + request: site_search_engine_service.FetchDomainVerificationStatusRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + site_search_engine_service.FetchDomainVerificationStatusRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for fetch_domain_verification_status + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. + """ + return request, metadata + + def post_fetch_domain_verification_status( + self, response: site_search_engine_service.FetchDomainVerificationStatusResponse + ) -> site_search_engine_service.FetchDomainVerificationStatusResponse: + """Post-rpc interceptor for fetch_domain_verification_status + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. 
+ """ + return response + + def pre_get_site_search_engine( + self, + request: site_search_engine_service.GetSiteSearchEngineRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + site_search_engine_service.GetSiteSearchEngineRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_site_search_engine + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. + """ + return request, metadata + + def post_get_site_search_engine( + self, response: site_search_engine.SiteSearchEngine + ) -> site_search_engine.SiteSearchEngine: + """Post-rpc interceptor for get_site_search_engine + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. + """ + return response + + def pre_get_target_site( + self, + request: site_search_engine_service.GetTargetSiteRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + site_search_engine_service.GetTargetSiteRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_target_site + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. + """ + return request, metadata + + def post_get_target_site( + self, response: site_search_engine.TargetSite + ) -> site_search_engine.TargetSite: + """Post-rpc interceptor for get_target_site + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. 
+ """ + return response + + def pre_list_target_sites( + self, + request: site_search_engine_service.ListTargetSitesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + site_search_engine_service.ListTargetSitesRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_target_sites + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. + """ + return request, metadata + + def post_list_target_sites( + self, response: site_search_engine_service.ListTargetSitesResponse + ) -> site_search_engine_service.ListTargetSitesResponse: + """Post-rpc interceptor for list_target_sites + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. + """ + return response + + def pre_recrawl_uris( + self, + request: site_search_engine_service.RecrawlUrisRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + site_search_engine_service.RecrawlUrisRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for recrawl_uris + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. + """ + return request, metadata + + def post_recrawl_uris( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for recrawl_uris + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. 
+ """ + return response + + def pre_update_target_site( + self, + request: site_search_engine_service.UpdateTargetSiteRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + site_search_engine_service.UpdateTargetSiteRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for update_target_site + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. + """ + return request, metadata + + def post_update_target_site( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_target_site + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the SiteSearchEngineService server. 
+ """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the SiteSearchEngineService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class SiteSearchEngineServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: SiteSearchEngineServiceRestInterceptor + + +class SiteSearchEngineServiceRestTransport(SiteSearchEngineServiceTransport): + """REST backend transport for SiteSearchEngineService. + + Service for managing site search related resources. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[SiteSearchEngineServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or SiteSearchEngineServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Only create a new client if we do not already have one.
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/engines/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, + { + "method": "get", + "uri": 
"/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/engines/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1beta", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. 
+ return self._operations_client + + class _BatchCreateTargetSites(SiteSearchEngineServiceRestStub): + def __hash__(self): + return hash("BatchCreateTargetSites") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: site_search_engine_service.BatchCreateTargetSitesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the batch create target sites method over HTTP. + + Args: + request (~.site_search_engine_service.BatchCreateTargetSitesRequest): + The request object. Request message for + [SiteSearchEngineService.BatchCreateTargetSites][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.BatchCreateTargetSites] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*/dataStores/*/siteSearchEngine}/targetSites:batchCreate", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/targetSites:batchCreate", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_batch_create_target_sites( + request, metadata + ) + pb_request = site_search_engine_service.BatchCreateTargetSitesRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_create_target_sites(resp) + return resp + + class _BatchVerifyTargetSites(SiteSearchEngineServiceRestStub): + def __hash__(self): + return hash("BatchVerifyTargetSites") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: site_search_engine_service.BatchVerifyTargetSitesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the batch verify target sites method over HTTP. + + Args: + request (~.site_search_engine_service.BatchVerifyTargetSitesRequest): + The request object. Request message for + [SiteSearchEngineService.BatchVerifyTargetSites][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.BatchVerifyTargetSites] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}:batchVerifyTargetSites", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_batch_verify_target_sites( + request, metadata + ) + pb_request = site_search_engine_service.BatchVerifyTargetSitesRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_verify_target_sites(resp) + return resp + + class _CreateTargetSite(SiteSearchEngineServiceRestStub): + def __hash__(self): + return hash("CreateTargetSite") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: site_search_engine_service.CreateTargetSiteRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create target site method over HTTP. + + Args: + request (~.site_search_engine_service.CreateTargetSiteRequest): + The request object. Request message for + [SiteSearchEngineService.CreateTargetSite][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.CreateTargetSite] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*/dataStores/*/siteSearchEngine}/targetSites", + "body": "target_site", + }, + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/targetSites", + "body": "target_site", + }, + ] + request, metadata = self._interceptor.pre_create_target_site( + request, metadata + ) + pb_request = site_search_engine_service.CreateTargetSiteRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_target_site(resp) + return resp + + class _DeleteTargetSite(SiteSearchEngineServiceRestStub): + def __hash__(self): + return hash("DeleteTargetSite") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: site_search_engine_service.DeleteTargetSiteRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete target site method over HTTP. + + Args: + request (~.site_search_engine_service.DeleteTargetSiteRequest): + The request object. Request message for + [SiteSearchEngineService.DeleteTargetSite][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.DeleteTargetSite] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/siteSearchEngine/targetSites/*}", + }, + { + "method": "delete", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_target_site( + request, metadata + ) + pb_request = site_search_engine_service.DeleteTargetSiteRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_target_site(resp) + return resp + + class _DisableAdvancedSiteSearch(SiteSearchEngineServiceRestStub): + def __hash__(self): + return hash("DisableAdvancedSiteSearch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: site_search_engine_service.DisableAdvancedSiteSearchRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the disable advanced site + search method over HTTP. + + Args: + request (~.site_search_engine_service.DisableAdvancedSiteSearchRequest): + The request object. Request message for + [SiteSearchEngineService.DisableAdvancedSiteSearch][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.DisableAdvancedSiteSearch] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{site_search_engine=projects/*/locations/*/dataStores/*/siteSearchEngine}:disableAdvancedSiteSearch", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta/{site_search_engine=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}:disableAdvancedSiteSearch", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_disable_advanced_site_search( + request, metadata + ) + pb_request = site_search_engine_service.DisableAdvancedSiteSearchRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_disable_advanced_site_search(resp) + return resp + + class _EnableAdvancedSiteSearch(SiteSearchEngineServiceRestStub): + def __hash__(self): + return hash("EnableAdvancedSiteSearch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: site_search_engine_service.EnableAdvancedSiteSearchRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the enable advanced site + search method over HTTP. + + Args: + request (~.site_search_engine_service.EnableAdvancedSiteSearchRequest): + The request object. Request message for + [SiteSearchEngineService.EnableAdvancedSiteSearch][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.EnableAdvancedSiteSearch] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{site_search_engine=projects/*/locations/*/dataStores/*/siteSearchEngine}:enableAdvancedSiteSearch", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta/{site_search_engine=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}:enableAdvancedSiteSearch", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_enable_advanced_site_search( + request, metadata + ) + pb_request = site_search_engine_service.EnableAdvancedSiteSearchRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_enable_advanced_site_search(resp) + return resp + + class _FetchDomainVerificationStatus(SiteSearchEngineServiceRestStub): + def __hash__(self): + return hash("FetchDomainVerificationStatus") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: site_search_engine_service.FetchDomainVerificationStatusRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> site_search_engine_service.FetchDomainVerificationStatusResponse: + r"""Call the fetch domain verification + status method over HTTP. + + Args: + request (~.site_search_engine_service.FetchDomainVerificationStatusRequest): + The request object. Request message for + [SiteSearchEngineService.FetchDomainVerificationStatus][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.FetchDomainVerificationStatus] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.site_search_engine_service.FetchDomainVerificationStatusResponse: + Response message for + [SiteSearchEngineService.FetchDomainVerificationStatus][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.FetchDomainVerificationStatus] + method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{site_search_engine=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}:fetchDomainVerificationStatus", + }, + ] + request, metadata = self._interceptor.pre_fetch_domain_verification_status( + request, metadata + ) + pb_request = ( + site_search_engine_service.FetchDomainVerificationStatusRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = site_search_engine_service.FetchDomainVerificationStatusResponse() + pb_resp = ( + site_search_engine_service.FetchDomainVerificationStatusResponse.pb( + resp + ) + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_fetch_domain_verification_status(resp) + return resp + + class _GetSiteSearchEngine(SiteSearchEngineServiceRestStub): + def __hash__(self): + return hash("GetSiteSearchEngine") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: site_search_engine_service.GetSiteSearchEngineRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> site_search_engine.SiteSearchEngine: + r"""Call the get site search engine method over HTTP. + + Args: + request (~.site_search_engine_service.GetSiteSearchEngineRequest): + The request object. Request message for + [SiteSearchEngineService.GetSiteSearchEngine][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.GetSiteSearchEngine] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.site_search_engine.SiteSearchEngine: + SiteSearchEngine captures DataStore + level site search persisting + configurations. It is a singleton value + per data store. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/siteSearchEngine}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}", + }, + ] + request, metadata = self._interceptor.pre_get_site_search_engine( + request, metadata + ) + pb_request = site_search_engine_service.GetSiteSearchEngineRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = site_search_engine.SiteSearchEngine() + pb_resp = site_search_engine.SiteSearchEngine.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_site_search_engine(resp) + return resp + + class _GetTargetSite(SiteSearchEngineServiceRestStub): + def __hash__(self): + return hash("GetTargetSite") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: site_search_engine_service.GetTargetSiteRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> site_search_engine.TargetSite: + r"""Call the get target site method over HTTP. + + Args: + request (~.site_search_engine_service.GetTargetSiteRequest): + The request object. Request message for + [SiteSearchEngineService.GetTargetSite][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.GetTargetSite] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.site_search_engine.TargetSite: + A target site for the + SiteSearchEngine. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/siteSearchEngine/targetSites/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/*}", + }, + ] + request, metadata = self._interceptor.pre_get_target_site(request, metadata) + pb_request = site_search_engine_service.GetTargetSiteRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = site_search_engine.TargetSite() + pb_resp = site_search_engine.TargetSite.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_target_site(resp) + return resp + + class _ListTargetSites(SiteSearchEngineServiceRestStub): + def __hash__(self): + return hash("ListTargetSites") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: site_search_engine_service.ListTargetSitesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> site_search_engine_service.ListTargetSitesResponse: + r"""Call the list target sites method over HTTP. + + Args: + request (~.site_search_engine_service.ListTargetSitesRequest): + The request object. Request message for + [SiteSearchEngineService.ListTargetSites][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.ListTargetSites] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.site_search_engine_service.ListTargetSitesResponse: + Response message for + [SiteSearchEngineService.ListTargetSites][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.ListTargetSites] + method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{parent=projects/*/locations/*/dataStores/*/siteSearchEngine}/targetSites", + }, + { + "method": "get", + "uri": "/v1beta/{parent=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/targetSites", + }, + ] + request, metadata = self._interceptor.pre_list_target_sites( + request, metadata + ) + pb_request = site_search_engine_service.ListTargetSitesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = site_search_engine_service.ListTargetSitesResponse() + pb_resp = site_search_engine_service.ListTargetSitesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_target_sites(resp) + return resp + + class _RecrawlUris(SiteSearchEngineServiceRestStub): + def __hash__(self): + return hash("RecrawlUris") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: site_search_engine_service.RecrawlUrisRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the recrawl uris method over HTTP. + + Args: + request (~.site_search_engine_service.RecrawlUrisRequest): + The request object. Request message for + [SiteSearchEngineService.RecrawlUris][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.RecrawlUris] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{site_search_engine=projects/*/locations/*/dataStores/*/siteSearchEngine}:recrawlUris", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta/{site_search_engine=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}:recrawlUris", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_recrawl_uris(request, metadata) + pb_request = site_search_engine_service.RecrawlUrisRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_recrawl_uris(resp) + return resp + + class _UpdateTargetSite(SiteSearchEngineServiceRestStub): + def __hash__(self): + return hash("UpdateTargetSite") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: site_search_engine_service.UpdateTargetSiteRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update target site method over HTTP. + + Args: + request (~.site_search_engine_service.UpdateTargetSiteRequest): + The request object. Request message for + [SiteSearchEngineService.UpdateTargetSite][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.UpdateTargetSite] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1beta/{target_site.name=projects/*/locations/*/dataStores/*/siteSearchEngine/targetSites/*}", + "body": "target_site", + }, + { + "method": "patch", + "uri": "/v1beta/{target_site.name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/*}", + "body": "target_site", + }, + ] + request, metadata = self._interceptor.pre_update_target_site( + request, metadata + ) + pb_request = site_search_engine_service.UpdateTargetSiteRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_target_site(resp) + return resp + + @property + def batch_create_target_sites( + self, + ) -> Callable[ + [site_search_engine_service.BatchCreateTargetSitesRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchCreateTargetSites(self._session, self._host, self._interceptor) # type: ignore + + @property + def batch_verify_target_sites( + self, + ) -> Callable[ + [site_search_engine_service.BatchVerifyTargetSitesRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchVerifyTargetSites(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_target_site( + self, + ) -> Callable[ + [site_search_engine_service.CreateTargetSiteRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateTargetSite(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_target_site( + self, + ) -> Callable[ + [site_search_engine_service.DeleteTargetSiteRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteTargetSite(self._session, self._host, self._interceptor) # type: ignore + + @property + def disable_advanced_site_search( + self, + ) -> Callable[ + [site_search_engine_service.DisableAdvancedSiteSearchRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DisableAdvancedSiteSearch(self._session, self._host, self._interceptor) # type: ignore + + @property + def enable_advanced_site_search( + self, + ) -> Callable[ + [site_search_engine_service.EnableAdvancedSiteSearchRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._EnableAdvancedSiteSearch(self._session, self._host, self._interceptor) # type: ignore + + @property + def fetch_domain_verification_status( + self, + ) -> Callable[ + [site_search_engine_service.FetchDomainVerificationStatusRequest], + site_search_engine_service.FetchDomainVerificationStatusResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FetchDomainVerificationStatus(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_site_search_engine( + self, + ) -> Callable[ + [site_search_engine_service.GetSiteSearchEngineRequest], + site_search_engine.SiteSearchEngine, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetSiteSearchEngine(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_target_site( + self, + ) -> Callable[ + [site_search_engine_service.GetTargetSiteRequest], site_search_engine.TargetSite + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetTargetSite(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_target_sites( + self, + ) -> Callable[ + [site_search_engine_service.ListTargetSitesRequest], + site_search_engine_service.ListTargetSitesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListTargetSites(self._session, self._host, self._interceptor) # type: ignore + + @property + def recrawl_uris( + self, + ) -> Callable[ + [site_search_engine_service.RecrawlUrisRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RecrawlUris(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_target_site( + self, + ) -> Callable[ + [site_search_engine_service.UpdateTargetSiteRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateTargetSite(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(SiteSearchEngineServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/engines/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + 
query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(SiteSearchEngineServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/engines/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1beta/{name=projects/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = 
json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("SiteSearchEngineServiceRestTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/transports/rest.py index 6c8696fe326a..b79ec506d6eb 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/transports/rest.py @@ -332,6 +332,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, { "method": "get", "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", @@ -386,6 +390,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: }, ], 
"google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, { "method": "get", "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", @@ -865,6 +873,10 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, { "method": "get", "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", @@ -979,6 +991,10 @@ def __call__( """ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, { "method": "get", "uri": "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/__init__.py index 3f550172dede..279ae3989ee6 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/__init__.py @@ -13,7 +13,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .common import CustomAttribute, DoubleList, Interval, UserInfo +from .common import ( + CustomAttribute, + DoubleList, + EmbeddingConfig, + IndustryVertical, + Interval, + SearchAddOn, + SearchTier, + SolutionType, + UserInfo, +) +from .completion import SuggestionDenyListEntry from .completion_service import CompleteQueryRequest, CompleteQueryResponse from .conversation import ( Conversation, @@ -32,6 +43,17 @@ ListConversationsResponse, UpdateConversationRequest, ) +from .data_store import DataStore +from .data_store_service import ( + CreateDataStoreMetadata, + CreateDataStoreRequest, + DeleteDataStoreMetadata, + DeleteDataStoreRequest, + GetDataStoreRequest, + ListDataStoresRequest, + ListDataStoresResponse, + UpdateDataStoreRequest, +) from .document import Document from .document_service import ( CreateDocumentRequest, @@ -41,6 +63,17 @@ ListDocumentsResponse, UpdateDocumentRequest, ) +from .engine import Engine +from .engine_service import ( + CreateEngineMetadata, + CreateEngineRequest, + DeleteEngineMetadata, + DeleteEngineRequest, + GetEngineRequest, + ListEnginesRequest, + ListEnginesResponse, + UpdateEngineRequest, +) from .import_config import ( BigQuerySource, GcsSource, @@ -48,6 +81,9 @@ ImportDocumentsRequest, ImportDocumentsResponse, ImportErrorConfig, + ImportSuggestionDenyListEntriesMetadata, + ImportSuggestionDenyListEntriesRequest, + ImportSuggestionDenyListEntriesResponse, ImportUserEventsMetadata, ImportUserEventsRequest, ImportUserEventsResponse, @@ -56,6 +92,9 @@ PurgeDocumentsMetadata, PurgeDocumentsRequest, PurgeDocumentsResponse, + PurgeSuggestionDenyListEntriesMetadata, + PurgeSuggestionDenyListEntriesRequest, + PurgeSuggestionDenyListEntriesResponse, ) from .recommendation_service import RecommendRequest, RecommendResponse from .schema import Schema @@ -71,6 +110,43 @@ UpdateSchemaRequest, ) from .search_service import SearchRequest, SearchResponse +from .serving_config import ServingConfig +from .serving_config_service import 
( + GetServingConfigRequest, + ListServingConfigsRequest, + ListServingConfigsResponse, + UpdateServingConfigRequest, +) +from .site_search_engine import SiteSearchEngine, SiteVerificationInfo, TargetSite +from .site_search_engine_service import ( + BatchCreateTargetSiteMetadata, + BatchCreateTargetSitesRequest, + BatchCreateTargetSitesResponse, + BatchVerifyTargetSitesMetadata, + BatchVerifyTargetSitesRequest, + BatchVerifyTargetSitesResponse, + CreateTargetSiteMetadata, + CreateTargetSiteRequest, + DeleteTargetSiteMetadata, + DeleteTargetSiteRequest, + DisableAdvancedSiteSearchMetadata, + DisableAdvancedSiteSearchRequest, + DisableAdvancedSiteSearchResponse, + EnableAdvancedSiteSearchMetadata, + EnableAdvancedSiteSearchRequest, + EnableAdvancedSiteSearchResponse, + FetchDomainVerificationStatusRequest, + FetchDomainVerificationStatusResponse, + GetSiteSearchEngineRequest, + GetTargetSiteRequest, + ListTargetSitesRequest, + ListTargetSitesResponse, + RecrawlUrisMetadata, + RecrawlUrisRequest, + RecrawlUrisResponse, + UpdateTargetSiteMetadata, + UpdateTargetSiteRequest, +) from .user_event import ( CompletionInfo, DocumentInfo, @@ -86,8 +162,14 @@ __all__ = ( "CustomAttribute", "DoubleList", + "EmbeddingConfig", "Interval", "UserInfo", + "IndustryVertical", + "SearchAddOn", + "SearchTier", + "SolutionType", + "SuggestionDenyListEntry", "CompleteQueryRequest", "CompleteQueryResponse", "Conversation", @@ -103,6 +185,15 @@ "ListConversationsRequest", "ListConversationsResponse", "UpdateConversationRequest", + "DataStore", + "CreateDataStoreMetadata", + "CreateDataStoreRequest", + "DeleteDataStoreMetadata", + "DeleteDataStoreRequest", + "GetDataStoreRequest", + "ListDataStoresRequest", + "ListDataStoresResponse", + "UpdateDataStoreRequest", "Document", "CreateDocumentRequest", "DeleteDocumentRequest", @@ -110,18 +201,33 @@ "ListDocumentsRequest", "ListDocumentsResponse", "UpdateDocumentRequest", + "Engine", + "CreateEngineMetadata", + "CreateEngineRequest", + 
"DeleteEngineMetadata", + "DeleteEngineRequest", + "GetEngineRequest", + "ListEnginesRequest", + "ListEnginesResponse", + "UpdateEngineRequest", "BigQuerySource", "GcsSource", "ImportDocumentsMetadata", "ImportDocumentsRequest", "ImportDocumentsResponse", "ImportErrorConfig", + "ImportSuggestionDenyListEntriesMetadata", + "ImportSuggestionDenyListEntriesRequest", + "ImportSuggestionDenyListEntriesResponse", "ImportUserEventsMetadata", "ImportUserEventsRequest", "ImportUserEventsResponse", "PurgeDocumentsMetadata", "PurgeDocumentsRequest", "PurgeDocumentsResponse", + "PurgeSuggestionDenyListEntriesMetadata", + "PurgeSuggestionDenyListEntriesRequest", + "PurgeSuggestionDenyListEntriesResponse", "RecommendRequest", "RecommendResponse", "Schema", @@ -136,6 +242,41 @@ "UpdateSchemaRequest", "SearchRequest", "SearchResponse", + "ServingConfig", + "GetServingConfigRequest", + "ListServingConfigsRequest", + "ListServingConfigsResponse", + "UpdateServingConfigRequest", + "SiteSearchEngine", + "SiteVerificationInfo", + "TargetSite", + "BatchCreateTargetSiteMetadata", + "BatchCreateTargetSitesRequest", + "BatchCreateTargetSitesResponse", + "BatchVerifyTargetSitesMetadata", + "BatchVerifyTargetSitesRequest", + "BatchVerifyTargetSitesResponse", + "CreateTargetSiteMetadata", + "CreateTargetSiteRequest", + "DeleteTargetSiteMetadata", + "DeleteTargetSiteRequest", + "DisableAdvancedSiteSearchMetadata", + "DisableAdvancedSiteSearchRequest", + "DisableAdvancedSiteSearchResponse", + "EnableAdvancedSiteSearchMetadata", + "EnableAdvancedSiteSearchRequest", + "EnableAdvancedSiteSearchResponse", + "FetchDomainVerificationStatusRequest", + "FetchDomainVerificationStatusResponse", + "GetSiteSearchEngineRequest", + "GetTargetSiteRequest", + "ListTargetSitesRequest", + "ListTargetSitesResponse", + "RecrawlUrisMetadata", + "RecrawlUrisRequest", + "RecrawlUrisResponse", + "UpdateTargetSiteMetadata", + "UpdateTargetSiteRequest", "CompletionInfo", "DocumentInfo", "MediaInfo", diff --git 
a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/common.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/common.py index ce4c33b3c863..994776c1c0c4 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/common.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/common.py @@ -22,14 +22,90 @@ __protobuf__ = proto.module( package="google.cloud.discoveryengine.v1beta", manifest={ + "IndustryVertical", + "SolutionType", + "SearchTier", + "SearchAddOn", "Interval", "CustomAttribute", "UserInfo", + "EmbeddingConfig", "DoubleList", }, ) +class IndustryVertical(proto.Enum): + r"""The industry vertical associated with the + [DataStore][google.cloud.discoveryengine.v1beta.DataStore]. + + Values: + INDUSTRY_VERTICAL_UNSPECIFIED (0): + Value used when unset. + GENERIC (1): + The generic vertical for documents that are + not specific to any industry vertical. + MEDIA (2): + The media industry vertical. + """ + INDUSTRY_VERTICAL_UNSPECIFIED = 0 + GENERIC = 1 + MEDIA = 2 + + +class SolutionType(proto.Enum): + r"""The type of solution. + + Values: + SOLUTION_TYPE_UNSPECIFIED (0): + Default value. + SOLUTION_TYPE_RECOMMENDATION (1): + Used for Recommendations AI. + SOLUTION_TYPE_SEARCH (2): + Used for Discovery Search. + SOLUTION_TYPE_CHAT (3): + Used for use cases related to the Generative + AI agent. + """ + SOLUTION_TYPE_UNSPECIFIED = 0 + SOLUTION_TYPE_RECOMMENDATION = 1 + SOLUTION_TYPE_SEARCH = 2 + SOLUTION_TYPE_CHAT = 3 + + +class SearchTier(proto.Enum): + r"""Tiers of search features. Different tiers might have + different pricing. To learn more, please check the pricing + documentation. + + Values: + SEARCH_TIER_UNSPECIFIED (0): + Default value when the enum is unspecified. + This is invalid to use. + SEARCH_TIER_STANDARD (1): + Standard tier. + SEARCH_TIER_ENTERPRISE (2): + Enterprise tier. 
+ """ + SEARCH_TIER_UNSPECIFIED = 0 + SEARCH_TIER_STANDARD = 1 + SEARCH_TIER_ENTERPRISE = 2 + + +class SearchAddOn(proto.Enum): + r"""Add-on that provides additional functionality for search. + + Values: + SEARCH_ADD_ON_UNSPECIFIED (0): + Default value when the enum is unspecified. + This is invalid to use. + SEARCH_ADD_ON_LLM (1): + Large language model add-on. + """ + SEARCH_ADD_ON_UNSPECIFIED = 0 + SEARCH_ADD_ON_LLM = 1 + + class Interval(proto.Message): r"""A floating point interval. @@ -164,6 +240,22 @@ class UserInfo(proto.Message): ) +class EmbeddingConfig(proto.Message): + r"""Defines embedding config, used for bring your own embeddings + feature. + + Attributes: + field_path (str): + Full field path in the schema mapped as + embedding field. + """ + + field_path: str = proto.Field( + proto.STRING, + number=1, + ) + + class DoubleList(proto.Message): r"""Double list. diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/completion.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/completion.py new file mode 100644 index 000000000000..4e48ed5bfffe --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/completion.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1beta", + manifest={ + "SuggestionDenyListEntry", + }, +) + + +class SuggestionDenyListEntry(proto.Message): + r"""Suggestion deny list entry identifying the phrase to block + from suggestions and the applied operation for the phrase. + + Attributes: + block_phrase (str): + Required. Phrase to block from suggestions + served. Can be maximum 125 characters. + match_operator (google.cloud.discoveryengine_v1beta.types.SuggestionDenyListEntry.MatchOperator): + Required. The match operator to apply for + this phrase. Whether to block the exact phrase, + or block any suggestions containing this phrase. + """ + + class MatchOperator(proto.Enum): + r"""Operator for matching with the generated suggestions. + + Values: + MATCH_OPERATOR_UNSPECIFIED (0): + Default value. Should not be used + EXACT_MATCH (1): + If the suggestion is an exact match to the block_phrase, + then block it. + CONTAINS (2): + If the suggestion contains the block_phrase, then block it. + """ + MATCH_OPERATOR_UNSPECIFIED = 0 + EXACT_MATCH = 1 + CONTAINS = 2 + + block_phrase: str = proto.Field( + proto.STRING, + number=1, + ) + match_operator: MatchOperator = proto.Field( + proto.ENUM, + number=2, + enum=MatchOperator, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/completion_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/completion_service.py index 0693fe3f9cbf..e2f0686798b9 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/completion_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/completion_service.py @@ -42,8 +42,9 @@ class CompleteQueryRequest(proto.Message): Required. 
The typeahead input used to fetch suggestions. Maximum length is 128 characters. query_model (str): - Selects data model of query suggestions for serving. - Currently supported values: + Specifies the autocomplete data model. This overrides any + model specified in the Configuration > Autocomplete section + of the Cloud console. Currently supported values: - ``document`` - Using suggestions generated from user-imported documents. @@ -61,8 +62,8 @@ class CompleteQueryRequest(proto.Message): Default values: - ``document`` is the default model for regular dataStores. - - ``search-history`` is the default model for - [IndustryVertical.SITE_SEARCH][] dataStores. + - ``search-history`` is the default model for site search + dataStores. user_pseudo_id (str): A unique identifier for tracking visitors. For example, this could be implemented with an HTTP cookie, which should be @@ -136,12 +137,23 @@ class QuerySuggestion(proto.Message): Attributes: suggestion (str): The suggestion for the query. + completable_field_paths (MutableSequence[str]): + The unique document field paths that serve as + the source of this suggestion if it was + generated from completable fields. + + This field is only populated for the + document-completable model. 
""" suggestion: str = proto.Field( proto.STRING, number=1, ) + completable_field_paths: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) query_suggestions: MutableSequence[QuerySuggestion] = proto.RepeatedField( proto.MESSAGE, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/conversation.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/conversation.py index 543cacc91b02..ac0451ad8b12 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/conversation.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/conversation.py @@ -41,6 +41,8 @@ class Conversation(proto.Message): name (str): Immutable. Fully qualified name ``project/*/locations/global/collections/{collection}/dataStore/*/conversations/*`` + or + ``project/*/locations/global/collections/{collection}/engines/*/conversations/*``. state (google.cloud.discoveryengine_v1beta.types.Conversation.State): The state of the Conversation. user_pseudo_id (str): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/conversational_search_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/conversational_search_service.py index 519976095226..5bde93922c38 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/conversational_search_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/conversational_search_service.py @@ -90,6 +90,26 @@ class ConverseConversationRequest(proto.Message): summary_spec (google.cloud.discoveryengine_v1beta.types.SearchRequest.ContentSearchSpec.SummarySpec): A specification for configuring the summary returned in the response. 
+ filter (str): + The filter syntax consists of an expression language for + constructing a predicate from one or more fields of the + documents being filtered. Filter expression is + case-sensitive. This will be used to filter search results + which may affect the summary response. + + If this field is unrecognizable, an ``INVALID_ARGUMENT`` is + returned. + + Filtering in Vertex AI Search is done by mapping the LHS + filter key to a key property defined in the Vertex AI Search + backend -- this mapping is defined by the customer in their + schema. For example a media customer might have a field + 'name' in their schema. In this case the filter would look + like this: filter --> name:'ANY("king kong")' + + For more information about filtering including syntax and + filter operators, see + `Filter `__ """ name: str = proto.Field( @@ -126,6 +146,10 @@ class ConverseConversationRequest(proto.Message): message=search_service.SearchRequest.ContentSearchSpec.SummarySpec, ) ) + filter: str = proto.Field( + proto.STRING, + number=9, + ) class ConverseConversationResponse(proto.Message): @@ -200,7 +224,7 @@ class UpdateConversationRequest(proto.Message): [Conversation][google.cloud.discoveryengine.v1beta.Conversation] to update. The following are NOT supported: - - [conversation.name][] + - [Conversation.name][google.cloud.discoveryengine.v1beta.Conversation.name] If not set or empty, all supported fields are updated. 
""" diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/data_store.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/data_store.py new file mode 100644 index 000000000000..cdfc5fc6d9fe --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/data_store.py @@ -0,0 +1,132 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.discoveryengine_v1beta.types import common + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1beta", + manifest={ + "DataStore", + }, +) + + +class DataStore(proto.Message): + r"""DataStore captures global settings and configs at the + DataStore level. + + Attributes: + name (str): + Immutable. The full resource name of the data store. Format: + ``projects/{project}/locations/{location}/collections/{collection_id}/dataStores/{data_store_id}``. + + This field must be a UTF-8 encoded string with a length + limit of 1024 characters. + display_name (str): + Required. The data store display name. + + This field must be a UTF-8 encoded string with a length + limit of 128 characters. Otherwise, an INVALID_ARGUMENT + error is returned. 
+ industry_vertical (google.cloud.discoveryengine_v1beta.types.IndustryVertical): + Immutable. The industry vertical that the + data store registers. + solution_types (MutableSequence[google.cloud.discoveryengine_v1beta.types.SolutionType]): + The solutions that the data store enrolls. Available + solutions for each + [industry_vertical][google.cloud.discoveryengine.v1beta.DataStore.industry_vertical]: + + - ``MEDIA``: ``SOLUTION_TYPE_RECOMMENDATION`` and + ``SOLUTION_TYPE_SEARCH``. + - ``SITE_SEARCH``: ``SOLUTION_TYPE_SEARCH`` is + automatically enrolled. Other solutions cannot be + enrolled. + default_schema_id (str): + Output only. The id of the default + [Schema][google.cloud.discoveryengine.v1beta.Schema] + associated to this data store. + content_config (google.cloud.discoveryengine_v1beta.types.DataStore.ContentConfig): + Immutable. The content config of the data store. If this + field is unset, the server behavior defaults to + [ContentConfig.NO_CONTENT][google.cloud.discoveryengine.v1beta.DataStore.ContentConfig.NO_CONTENT]. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp the + [DataStore][google.cloud.discoveryengine.v1beta.DataStore] + was created at. + """ + + class ContentConfig(proto.Enum): + r"""Content config of the data store. + + Values: + CONTENT_CONFIG_UNSPECIFIED (0): + Default value. + NO_CONTENT (1): + Only contains documents without any + [Document.content][google.cloud.discoveryengine.v1beta.Document.content]. + CONTENT_REQUIRED (2): + Only contains documents with + [Document.content][google.cloud.discoveryengine.v1beta.Document.content]. + PUBLIC_WEBSITE (3): + The data store is used for public website + search. 
+ """ + CONTENT_CONFIG_UNSPECIFIED = 0 + NO_CONTENT = 1 + CONTENT_REQUIRED = 2 + PUBLIC_WEBSITE = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + industry_vertical: common.IndustryVertical = proto.Field( + proto.ENUM, + number=3, + enum=common.IndustryVertical, + ) + solution_types: MutableSequence[common.SolutionType] = proto.RepeatedField( + proto.ENUM, + number=5, + enum=common.SolutionType, + ) + default_schema_id: str = proto.Field( + proto.STRING, + number=7, + ) + content_config: ContentConfig = proto.Field( + proto.ENUM, + number=6, + enum=ContentConfig, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/data_store_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/data_store_service.py new file mode 100644 index 000000000000..e13403071bdc --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/data_store_service.py @@ -0,0 +1,325 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.discoveryengine_v1beta.types import data_store as gcd_data_store + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1beta", + manifest={ + "CreateDataStoreRequest", + "GetDataStoreRequest", + "CreateDataStoreMetadata", + "ListDataStoresRequest", + "ListDataStoresResponse", + "DeleteDataStoreRequest", + "UpdateDataStoreRequest", + "DeleteDataStoreMetadata", + }, +) + + +class CreateDataStoreRequest(proto.Message): + r"""Request for + [DataStoreService.CreateDataStore][google.cloud.discoveryengine.v1beta.DataStoreService.CreateDataStore] + method. + + Attributes: + parent (str): + Required. The parent resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}``. + data_store (google.cloud.discoveryengine_v1beta.types.DataStore): + Required. The + [DataStore][google.cloud.discoveryengine.v1beta.DataStore] + to create. + data_store_id (str): + Required. The ID to use for the + [DataStore][google.cloud.discoveryengine.v1beta.DataStore], + which will become the final component of the + [DataStore][google.cloud.discoveryengine.v1beta.DataStore]'s + resource name. + + This field must conform to + `RFC-1034 `__ standard + with a length limit of 63 characters. Otherwise, an + INVALID_ARGUMENT error is returned. + create_advanced_site_search (bool): + A boolean flag indicating whether user want to directly + create an advanced data store for site search. If the data + store is not configured as site search (GENERIC vertical and + PUBLIC_WEBSITE content_config), this flag will be ignored. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + data_store: gcd_data_store.DataStore = proto.Field( + proto.MESSAGE, + number=2, + message=gcd_data_store.DataStore, + ) + data_store_id: str = proto.Field( + proto.STRING, + number=3, + ) + create_advanced_site_search: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class GetDataStoreRequest(proto.Message): + r"""Request message for + [DataStoreService.GetDataStore][google.cloud.discoveryengine.v1beta.DataStoreService.GetDataStore] + method. + + Attributes: + name (str): + Required. Full resource name of + [DataStore][google.cloud.discoveryengine.v1beta.DataStore], + such as + ``projects/{project}/locations/{location}/collections/{collection_id}/dataStores/{data_store_id}``. + + If the caller does not have permission to access the + [DataStore][google.cloud.discoveryengine.v1beta.DataStore], + regardless of whether or not it exists, a PERMISSION_DENIED + error is returned. + + If the requested + [DataStore][google.cloud.discoveryengine.v1beta.DataStore] + does not exist, a NOT_FOUND error is returned. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateDataStoreMetadata(proto.Message): + r"""Metadata related to the progress of the + [DataStoreService.CreateDataStore][google.cloud.discoveryengine.v1beta.DataStoreService.CreateDataStore] + operation. This will be returned by the + google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. 
+ """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class ListDataStoresRequest(proto.Message): + r"""Request message for + [DataStoreService.ListDataStores][google.cloud.discoveryengine.v1beta.DataStoreService.ListDataStores] + method. + + Attributes: + parent (str): + Required. The parent branch resource name, such as + ``projects/{project}/locations/{location}/collections/{collection_id}``. + + If the caller does not have permission to list + [DataStore][google.cloud.discoveryengine.v1beta.DataStore]s + under this location, regardless of whether or not this data + store exists, a PERMISSION_DENIED error is returned. + page_size (int): + Maximum number of + [DataStore][google.cloud.discoveryengine.v1beta.DataStore]s + to return. If unspecified, defaults to 10. The maximum + allowed value is 50. Values above 50 will be coerced to 50. + + If this field is negative, an INVALID_ARGUMENT is returned. + page_token (str): + A page token + [ListDataStoresResponse.next_page_token][google.cloud.discoveryengine.v1beta.ListDataStoresResponse.next_page_token], + received from a previous + [DataStoreService.ListDataStores][google.cloud.discoveryengine.v1beta.DataStoreService.ListDataStores] + call. Provide this to retrieve the subsequent page. + + When paginating, all other parameters provided to + [DataStoreService.ListDataStores][google.cloud.discoveryengine.v1beta.DataStoreService.ListDataStores] + must match the call that provided the page token. Otherwise, + an INVALID_ARGUMENT error is returned. + filter (str): + Filter by solution type. 
For example: filter = + 'solution_type:SOLUTION_TYPE_SEARCH' + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListDataStoresResponse(proto.Message): + r"""Response message for + [DataStoreService.ListDataStores][google.cloud.discoveryengine.v1beta.DataStoreService.ListDataStores] + method. + + Attributes: + data_stores (MutableSequence[google.cloud.discoveryengine_v1beta.types.DataStore]): + All the customer's + [DataStore][google.cloud.discoveryengine.v1beta.DataStore]s. + next_page_token (str): + A token that can be sent as + [ListDataStoresRequest.page_token][google.cloud.discoveryengine.v1beta.ListDataStoresRequest.page_token] + to retrieve the next page. If this field is omitted, there + are no subsequent pages. + """ + + @property + def raw_page(self): + return self + + data_stores: MutableSequence[gcd_data_store.DataStore] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gcd_data_store.DataStore, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteDataStoreRequest(proto.Message): + r"""Request message for + [DataStoreService.DeleteDataStore][google.cloud.discoveryengine.v1beta.DataStoreService.DeleteDataStore] + method. + + Attributes: + name (str): + Required. Full resource name of + [DataStore][google.cloud.discoveryengine.v1beta.DataStore], + such as + ``projects/{project}/locations/{location}/collections/{collection_id}/dataStores/{data_store_id}``. + + If the caller does not have permission to delete the + [DataStore][google.cloud.discoveryengine.v1beta.DataStore], + regardless of whether or not it exists, a PERMISSION_DENIED + error is returned. + + If the + [DataStore][google.cloud.discoveryengine.v1beta.DataStore] + to delete does not exist, a NOT_FOUND error is returned. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateDataStoreRequest(proto.Message): + r"""Request message for + [DataStoreService.UpdateDataStore][google.cloud.discoveryengine.v1beta.DataStoreService.UpdateDataStore] + method. + + Attributes: + data_store (google.cloud.discoveryengine_v1beta.types.DataStore): + Required. The + [DataStore][google.cloud.discoveryengine.v1beta.DataStore] + to update. + + If the caller does not have permission to update the + [DataStore][google.cloud.discoveryengine.v1beta.DataStore], + regardless of whether or not it exists, a PERMISSION_DENIED + error is returned. + + If the + [DataStore][google.cloud.discoveryengine.v1beta.DataStore] + to update does not exist, a NOT_FOUND error is returned. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Indicates which fields in the provided + [DataStore][google.cloud.discoveryengine.v1beta.DataStore] + to update. + + If an unsupported or unknown field is provided, an + INVALID_ARGUMENT error is returned. + """ + + data_store: gcd_data_store.DataStore = proto.Field( + proto.MESSAGE, + number=1, + message=gcd_data_store.DataStore, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteDataStoreMetadata(proto.Message): + r"""Metadata related to the progress of the + [DataStoreService.DeleteDataStore][google.cloud.discoveryengine.v1beta.DataStoreService.DeleteDataStore] + operation. This will be returned by the + google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. 
+ """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document.py index 061911cfeb39..1efd3c392ff2 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document.py @@ -111,7 +111,8 @@ class Content(proto.Message): uri (str): The URI of the content. Only Cloud Storage URIs (e.g. ``gs://bucket-name/path/to/file``) are supported. The - maximum file size is 100 MB. + maximum file size is 2.5 MB for text-based formats, 100 MB + for other formats. This field is a member of `oneof`_ ``content``. mime_type (str): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/engine.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/engine.py new file mode 100644 index 000000000000..fb449f0a60b0 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/engine.py @@ -0,0 +1,342 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.discoveryengine_v1beta.types import common + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1beta", + manifest={ + "Engine", + }, +) + + +class Engine(proto.Message): + r"""Metadata that describes the training and serving parameters of an + [Engine][google.cloud.discoveryengine.v1beta.Engine]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + chat_engine_config (google.cloud.discoveryengine_v1beta.types.Engine.ChatEngineConfig): + Configurations for the Chat Engine. Only applicable if + [solution_type][google.cloud.discoveryengine.v1beta.Engine.solution_type] + is + [SOLUTION_TYPE_CHAT][google.cloud.discoveryengine.v1beta.SolutionType.SOLUTION_TYPE_CHAT]. + + This field is a member of `oneof`_ ``engine_config``. + search_engine_config (google.cloud.discoveryengine_v1beta.types.Engine.SearchEngineConfig): + Configurations for the Search Engine. Only applicable if + [solution_type][google.cloud.discoveryengine.v1beta.Engine.solution_type] + is + [SOLUTION_TYPE_SEARCH][google.cloud.discoveryengine.v1beta.SolutionType.SOLUTION_TYPE_SEARCH]. + + This field is a member of `oneof`_ ``engine_config``. + chat_engine_metadata (google.cloud.discoveryengine_v1beta.types.Engine.ChatEngineMetadata): + Output only. Additional information of the Chat Engine. 
Only + applicable if + [solution_type][google.cloud.discoveryengine.v1beta.Engine.solution_type] + is + [SOLUTION_TYPE_CHAT][google.cloud.discoveryengine.v1beta.SolutionType.SOLUTION_TYPE_CHAT]. + + This field is a member of `oneof`_ ``engine_metadata``. + name (str): + Immutable. The fully qualified resource name of the engine. + + This field must be a UTF-8 encoded string with a length + limit of 1024 characters. + + Format: + ``projects/{project_number}/locations/{location}/collections/{collection}/engines/{engine}`` + engine should be 1-63 characters, and valid characters are + /[a-z0-9][a-z0-9-_]*/. Otherwise, an INVALID_ARGUMENT error + is returned. + display_name (str): + Required. The display name of the engine. + Should be human readable. UTF-8 encoded string + with limit of 1024 characters. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp the Recommendation + Engine was created at. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp the Recommendation + Engine was last updated. + data_store_ids (MutableSequence[str]): + The data stores associated with this engine. + + For + [SOLUTION_TYPE_SEARCH][google.cloud.discoveryengine.v1beta.SolutionType.SOLUTION_TYPE_SEARCH] + and + [SOLUTION_TYPE_RECOMMENDATION][google.cloud.discoveryengine.v1beta.SolutionType.SOLUTION_TYPE_RECOMMENDATION] + type of engines, they can only associate with at most one + data store. + + If + [solution_type][google.cloud.discoveryengine.v1beta.Engine.solution_type] + is + [SOLUTION_TYPE_CHAT][google.cloud.discoveryengine.v1beta.SolutionType.SOLUTION_TYPE_CHAT], + multiple + [DataStore][google.cloud.discoveryengine.v1beta.DataStore]s + in the same + [Collection][google.cloud.discoveryengine.v1beta.Collection] + can be associated here. 
+ + Note that when used in + [CreateEngineRequest][google.cloud.discoveryengine.v1beta.CreateEngineRequest], + one DataStore id must be provided as the system will use it + for necessary initializations. + solution_type (google.cloud.discoveryengine_v1beta.types.SolutionType): + Required. The solutions of the engine. + industry_vertical (google.cloud.discoveryengine_v1beta.types.IndustryVertical): + The industry vertical that the engine registers. The + restriction of the Engine industry vertical is based on + [DataStore][google.cloud.discoveryengine.v1beta.DataStore]: + If unspecified, default to ``GENERIC``. Vertical on Engine + has to match vertical of the DataStore linked to the + engine. + common_config (google.cloud.discoveryengine_v1beta.types.Engine.CommonConfig): + Common config spec that specifies the + metadata of the engine. + """ + + class SearchEngineConfig(proto.Message): + r"""Configurations for a Search Engine. + + Attributes: + search_tier (google.cloud.discoveryengine_v1beta.types.SearchTier): + The search feature tier of this engine. + + Different tiers might have different pricing. To learn more, + please check the pricing documentation. + + Defaults to + [SearchTier.SEARCH_TIER_STANDARD][google.cloud.discoveryengine.v1beta.SearchTier.SEARCH_TIER_STANDARD] + if not specified. + search_add_ons (MutableSequence[google.cloud.discoveryengine_v1beta.types.SearchAddOn]): + The add-on that this search engine enables. + """ + + search_tier: common.SearchTier = proto.Field( + proto.ENUM, + number=1, + enum=common.SearchTier, + ) + search_add_ons: MutableSequence[common.SearchAddOn] = proto.RepeatedField( + proto.ENUM, + number=2, + enum=common.SearchAddOn, + ) + + class ChatEngineConfig(proto.Message): + r"""Configurations for a Chat Engine. 
+ + Attributes: + agent_creation_config (google.cloud.discoveryengine_v1beta.types.Engine.ChatEngineConfig.AgentCreationConfig): + The configuration to generate the Dialogflow agent that is + associated to this Engine. + + Note that these configurations are one-time consumed by and + passed to Dialogflow service. It means they cannot be + retrieved using + [EngineService.GetEngine][google.cloud.discoveryengine.v1beta.EngineService.GetEngine] + or + [EngineService.ListEngines][google.cloud.discoveryengine.v1beta.EngineService.ListEngines] + API after engine creation. + dialogflow_agent_to_link (str): + The resource name of an existing Dialogflow agent to link to + this Chat Engine. Customers can either provide + ``agent_creation_config`` to create agent or provide an + agent name that links the agent with the Chat engine. + + Format: + ``projects//locations//agents/``. + + Note that the ``dialogflow_agent_to_link`` are one-time + consumed by and passed to Dialogflow service. It means they + cannot be retrieved using + [EngineService.GetEngine][google.cloud.discoveryengine.v1beta.EngineService.GetEngine] + or + [EngineService.ListEngines][google.cloud.discoveryengine.v1beta.EngineService.ListEngines] + API after engine creation. Please use + [ChatEngineMetadata.dialogflow_agent][google.cloud.discoveryengine.v1beta.Engine.ChatEngineMetadata.dialogflow_agent] + for actual agent association after Engine is created. + """ + + class AgentCreationConfig(proto.Message): + r"""Configurations for generating a Dialogflow agent. + + Note that these configurations are one-time consumed by and passed + to Dialogflow service. It means they cannot be retrieved using + [EngineService.GetEngine][google.cloud.discoveryengine.v1beta.EngineService.GetEngine] + or + [EngineService.ListEngines][google.cloud.discoveryengine.v1beta.EngineService.ListEngines] + API after engine creation. 
+ + Attributes: + business (str): + Name of the company, organization or other + entity that the agent represents. Used for + knowledge connector LLM prompt and for knowledge + search. + default_language_code (str): + Required. The default language of the agent as a language + tag. See `Language + Support `__ + for a list of the currently supported language codes. + time_zone (str): + Required. The time zone of the agent from the `time zone + database `__, e.g., + America/New_York, Europe/Paris. + location (str): + Agent location for Agent creation, supported + values: global/us/eu. If not provided, us Engine + will create Agent using us-central-1 by default; + eu Engine will create Agent using eu-west-1 by + default. + """ + + business: str = proto.Field( + proto.STRING, + number=1, + ) + default_language_code: str = proto.Field( + proto.STRING, + number=2, + ) + time_zone: str = proto.Field( + proto.STRING, + number=3, + ) + location: str = proto.Field( + proto.STRING, + number=4, + ) + + agent_creation_config: "Engine.ChatEngineConfig.AgentCreationConfig" = ( + proto.Field( + proto.MESSAGE, + number=1, + message="Engine.ChatEngineConfig.AgentCreationConfig", + ) + ) + dialogflow_agent_to_link: str = proto.Field( + proto.STRING, + number=2, + ) + + class CommonConfig(proto.Message): + r"""Common configurations for an Engine. + + Attributes: + company_name (str): + Immutable. The name of the company, business + or entity that is associated with the engine. + Setting this may help improve LLM related + features. + """ + + company_name: str = proto.Field( + proto.STRING, + number=1, + ) + + class ChatEngineMetadata(proto.Message): + r"""Additional information of a Chat Engine. + Fields in this message are output only. + + Attributes: + dialogflow_agent (str): + The resource name of a Dialogflow agent, that this Chat + Engine refers to. + + Format: + ``projects//locations//agents/``. 
+ """ + + dialogflow_agent: str = proto.Field( + proto.STRING, + number=1, + ) + + chat_engine_config: ChatEngineConfig = proto.Field( + proto.MESSAGE, + number=11, + oneof="engine_config", + message=ChatEngineConfig, + ) + search_engine_config: SearchEngineConfig = proto.Field( + proto.MESSAGE, + number=13, + oneof="engine_config", + message=SearchEngineConfig, + ) + chat_engine_metadata: ChatEngineMetadata = proto.Field( + proto.MESSAGE, + number=12, + oneof="engine_metadata", + message=ChatEngineMetadata, + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + data_store_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + solution_type: common.SolutionType = proto.Field( + proto.ENUM, + number=6, + enum=common.SolutionType, + ) + industry_vertical: common.IndustryVertical = proto.Field( + proto.ENUM, + number=16, + enum=common.IndustryVertical, + ) + common_config: CommonConfig = proto.Field( + proto.MESSAGE, + number=15, + message=CommonConfig, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/engine_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/engine_service.py new file mode 100644 index 000000000000..c1ce16f0929e --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/engine_service.py @@ -0,0 +1,283 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.discoveryengine_v1beta.types import engine as gcd_engine + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1beta", + manifest={ + "CreateEngineRequest", + "CreateEngineMetadata", + "DeleteEngineRequest", + "DeleteEngineMetadata", + "GetEngineRequest", + "ListEnginesRequest", + "ListEnginesResponse", + "UpdateEngineRequest", + }, +) + + +class CreateEngineRequest(proto.Message): + r"""Request for + [EngineService.CreateEngine][google.cloud.discoveryengine.v1beta.EngineService.CreateEngine] + method. + + Attributes: + parent (str): + Required. The parent resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}``. + engine (google.cloud.discoveryengine_v1beta.types.Engine): + Required. The + [Engine][google.cloud.discoveryengine.v1beta.Engine] to + create. + engine_id (str): + Required. The ID to use for the + [Engine][google.cloud.discoveryengine.v1beta.Engine], which + will become the final component of the + [Engine][google.cloud.discoveryengine.v1beta.Engine]'s + resource name. + + This field must conform to + `RFC-1034 `__ standard + with a length limit of 63 characters. Otherwise, an + INVALID_ARGUMENT error is returned. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + engine: gcd_engine.Engine = proto.Field( + proto.MESSAGE, + number=2, + message=gcd_engine.Engine, + ) + engine_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class CreateEngineMetadata(proto.Message): + r"""Metadata related to the progress of the + [EngineService.CreateEngine][google.cloud.discoveryengine.v1beta.EngineService.CreateEngine] + operation. This will be returned by the + google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class DeleteEngineRequest(proto.Message): + r"""Request message for + [EngineService.DeleteEngine][google.cloud.discoveryengine.v1beta.EngineService.DeleteEngine] + method. + + Attributes: + name (str): + Required. Full resource name of + [Engine][google.cloud.discoveryengine.v1beta.Engine], such + as + ``projects/{project}/locations/{location}/collections/{collection_id}/engines/{engine_id}``. + + If the caller does not have permission to delete the + [Engine][google.cloud.discoveryengine.v1beta.Engine], + regardless of whether or not it exists, a PERMISSION_DENIED + error is returned. + + If the [Engine][google.cloud.discoveryengine.v1beta.Engine] + to delete does not exist, a NOT_FOUND error is returned. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteEngineMetadata(proto.Message): + r"""Metadata related to the progress of the + [EngineService.DeleteEngine][google.cloud.discoveryengine.v1beta.EngineService.DeleteEngine] + operation. This will be returned by the + google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class GetEngineRequest(proto.Message): + r"""Request message for + [EngineService.GetEngine][google.cloud.discoveryengine.v1beta.EngineService.GetEngine] + method. + + Attributes: + name (str): + Required. Full resource name of + [Engine][google.cloud.discoveryengine.v1beta.Engine], such + as + ``projects/{project}/locations/{location}/collections/{collection_id}/engines/{engine_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListEnginesRequest(proto.Message): + r"""Request message for + [EngineService.ListEngines][google.cloud.discoveryengine.v1beta.EngineService.ListEngines] + method. + + Attributes: + parent (str): + Required. The parent resource name, such as + ``projects/{project}/locations/{location}/collections/{collection_id}``. + page_size (int): + Optional. Not supported. + page_token (str): + Optional. Not supported. + filter (str): + Optional. Filter by solution type. 
For example: + solution_type=SOLUTION_TYPE_SEARCH + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListEnginesResponse(proto.Message): + r"""Response message for + [EngineService.ListEngines][google.cloud.discoveryengine.v1beta.EngineService.ListEngines] + method. + + Attributes: + engines (MutableSequence[google.cloud.discoveryengine_v1beta.types.Engine]): + All the customer's + [Engine][google.cloud.discoveryengine.v1beta.Engine]s. + next_page_token (str): + Not supported. + """ + + @property + def raw_page(self): + return self + + engines: MutableSequence[gcd_engine.Engine] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gcd_engine.Engine, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class UpdateEngineRequest(proto.Message): + r"""Request message for + [EngineService.UpdateEngine][google.cloud.discoveryengine.v1beta.EngineService.UpdateEngine] + method. + + Attributes: + engine (google.cloud.discoveryengine_v1beta.types.Engine): + Required. The + [Engine][google.cloud.discoveryengine.v1beta.Engine] to + update. + + If the caller does not have permission to update the + [Engine][google.cloud.discoveryengine.v1beta.Engine], + regardless of whether or not it exists, a PERMISSION_DENIED + error is returned. + + If the [Engine][google.cloud.discoveryengine.v1beta.Engine] + to update does not exist, a NOT_FOUND error is returned. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Indicates which fields in the provided + [Engine][google.cloud.discoveryengine.v1beta.Engine] to + update. + + If an unsupported or unknown field is provided, an + INVALID_ARGUMENT error is returned. 
+ """ + + engine: gcd_engine.Engine = proto.Field( + proto.MESSAGE, + number=1, + message=gcd_engine.Engine, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/import_config.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/import_config.py index 7f81120ce100..3d592cf7e54f 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/import_config.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/import_config.py @@ -22,7 +22,7 @@ from google.type import date_pb2 # type: ignore import proto # type: ignore -from google.cloud.discoveryengine_v1beta.types import document, user_event +from google.cloud.discoveryengine_v1beta.types import completion, document, user_event __protobuf__ = proto.module( package="google.cloud.discoveryengine.v1beta", @@ -36,6 +36,9 @@ "ImportDocumentsMetadata", "ImportDocumentsRequest", "ImportDocumentsResponse", + "ImportSuggestionDenyListEntriesRequest", + "ImportSuggestionDenyListEntriesResponse", + "ImportSuggestionDenyListEntriesMetadata", }, ) @@ -603,4 +606,131 @@ class ImportDocumentsResponse(proto.Message): ) +class ImportSuggestionDenyListEntriesRequest(proto.Message): + r"""Request message for + [CompletionService.ImportSuggestionDenyListEntries][google.cloud.discoveryengine.v1beta.CompletionService.ImportSuggestionDenyListEntries] + method. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + inline_source (google.cloud.discoveryengine_v1beta.types.ImportSuggestionDenyListEntriesRequest.InlineSource): + The Inline source for the input content for + suggestion deny list entries. + + This field is a member of `oneof`_ ``source``. + gcs_source (google.cloud.discoveryengine_v1beta.types.GcsSource): + Cloud Storage location for the input content. + + Only 1 file can be specified that contains all entries to + import. Supported values ``gcs_source.schema`` for + autocomplete suggestion deny list entry imports: + + - ``suggestion_deny_list`` (default): One JSON + [SuggestionDenyListEntry] per line. + + This field is a member of `oneof`_ ``source``. + parent (str): + Required. The parent data store resource name for which to + import denylist entries. Follows pattern + projects/\ */locations/*/collections/*/dataStores/*. + """ + + class InlineSource(proto.Message): + r"""The inline source for SuggestionDenyListEntry. + + Attributes: + entries (MutableSequence[google.cloud.discoveryengine_v1beta.types.SuggestionDenyListEntry]): + Required. A list of all denylist entries to + import. Max of 1000 items. + """ + + entries: MutableSequence[ + completion.SuggestionDenyListEntry + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=completion.SuggestionDenyListEntry, + ) + + inline_source: InlineSource = proto.Field( + proto.MESSAGE, + number=2, + oneof="source", + message=InlineSource, + ) + gcs_source: "GcsSource" = proto.Field( + proto.MESSAGE, + number=3, + oneof="source", + message="GcsSource", + ) + parent: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ImportSuggestionDenyListEntriesResponse(proto.Message): + r"""Response message for + [CompletionService.ImportSuggestionDenyListEntries][google.cloud.discoveryengine.v1beta.CompletionService.ImportSuggestionDenyListEntries] + method. 
+ + Attributes: + error_samples (MutableSequence[google.rpc.status_pb2.Status]): + A sample of errors encountered while + processing the request. + imported_entries_count (int): + Count of deny list entries successfully + imported. + failed_entries_count (int): + Count of deny list entries that failed to be + imported. + """ + + error_samples: MutableSequence[status_pb2.Status] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=status_pb2.Status, + ) + imported_entries_count: int = proto.Field( + proto.INT64, + number=2, + ) + failed_entries_count: int = proto.Field( + proto.INT64, + number=3, + ) + + +class ImportSuggestionDenyListEntriesMetadata(proto.Message): + r"""Metadata related to the progress of the + ImportSuggestionDenyListEntries operation. This is returned by + the google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. 
+ """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/purge_config.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/purge_config.py index 2c5fcc5cc261..94fc28a05eb9 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/purge_config.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/purge_config.py @@ -18,6 +18,7 @@ from typing import MutableMapping, MutableSequence from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore import proto # type: ignore __protobuf__ = proto.module( @@ -26,6 +27,9 @@ "PurgeDocumentsRequest", "PurgeDocumentsResponse", "PurgeDocumentsMetadata", + "PurgeSuggestionDenyListEntriesRequest", + "PurgeSuggestionDenyListEntriesResponse", + "PurgeSuggestionDenyListEntriesMetadata", }, ) @@ -128,4 +132,72 @@ class PurgeDocumentsMetadata(proto.Message): ) +class PurgeSuggestionDenyListEntriesRequest(proto.Message): + r"""Request message for + [CompletionService.PurgeSuggestionDenyListEntries][google.cloud.discoveryengine.v1beta.CompletionService.PurgeSuggestionDenyListEntries] + method. + + Attributes: + parent (str): + Required. The parent data store resource name for which to + import denylist entries. Follows pattern + projects/\ */locations/*/collections/*/dataStores/*. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + + +class PurgeSuggestionDenyListEntriesResponse(proto.Message): + r"""Response message for + [CompletionService.PurgeSuggestionDenyListEntries][google.cloud.discoveryengine.v1beta.CompletionService.PurgeSuggestionDenyListEntries] + method. + + Attributes: + purge_count (int): + Number of suggestion deny list entries + purged. + error_samples (MutableSequence[google.rpc.status_pb2.Status]): + A sample of errors encountered while + processing the request. + """ + + purge_count: int = proto.Field( + proto.INT64, + number=1, + ) + error_samples: MutableSequence[status_pb2.Status] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=status_pb2.Status, + ) + + +class PurgeSuggestionDenyListEntriesMetadata(proto.Message): + r"""Metadata related to the progress of the + PurgeSuggestionDenyListEntries operation. This is returned by + the google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. 
+ """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/recommendation_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/recommendation_service.py index c468f53c1d84..838423d0389c 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/recommendation_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/recommendation_service.py @@ -37,11 +37,22 @@ class RecommendRequest(proto.Message): Attributes: serving_config (str): - Required. Full resource name of the format: + Required. Full resource name of a + [ServingConfig][google.cloud.discoveryengine.v1beta.ServingConfig]: + ``projects/*/locations/global/collections/*/engines/*/servingConfigs/*``, + or ``projects/*/locations/global/collections/*/dataStores/*/servingConfigs/*`` - Before you can request recommendations from your model, you - must create at least one serving config for it. + One default serving config is created along with your + recommendation engine creation. The engine ID will be used + as the ID of the default serving config. For example, for + Engine + ``projects/*/locations/global/collections/*/engines/my-engine``, + you can use + ``projects/*/locations/global/collections/*/engines/my-engine/servingConfigs/my-engine`` + for your + [RecommendationService.Recommend][google.cloud.discoveryengine.v1beta.RecommendationService.Recommend] + requests. user_event (google.cloud.discoveryengine_v1beta.types.UserEvent): Required. Context about the user, what they are looking at and what action they took to trigger the Recommend request. 
@@ -77,6 +88,16 @@ class RecommendRequest(proto.Message): - ``(filter_tags: ANY("Red", "Blue") OR filter_tags: ANY("Hot", "Cold"))`` - ``(filter_tags: ANY("Red", "Blue")) AND NOT (filter_tags: ANY("Green"))`` + If ``attributeFilteringSyntax`` is set to true under the + ``params`` field, then attribute-based expressions are + expected instead of the above described tag-based syntax. + Examples: + + - (language: ANY("en", "es")) AND NOT (categories: + ANY("Movie")) + - (available: true) AND (language: ANY("en", "es")) OR + (categories: ANY("Movie")) + If your filter blocks all results, the API will return generic (unfiltered) popular Documents. If you only want results strictly matching the filters, set @@ -124,6 +145,10 @@ class RecommendRequest(proto.Message): - ``auto-diversity`` This gives request-level control and adjusts recommendation results based on Document category. + + - ``attributeFilteringSyntax``: Boolean. False by default. + If set to true, the ``filter`` field is interpreted + according to the new, attribute-based syntax. user_labels (MutableMapping[str, str]): The user labels applied to a resource must meet the following requirements: diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/search_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/search_service.py index 4e841e4cbb5d..cf07db4480c6 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/search_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/search_service.py @@ -41,6 +41,8 @@ class SearchRequest(proto.Message): serving_config (str): Required. 
The resource name of the Search serving config, such as + ``projects/*/locations/global/collections/default_collection/engines/*/servingConfigs/default_serving_config``, + or ``projects/*/locations/global/collections/default_collection/dataStores/default_data_store/servingConfigs/default_serving_config``. This field is used to identify the serving configuration name, set of models used to make the search. @@ -91,12 +93,39 @@ class SearchRequest(proto.Message): If this field is unrecognizable, an ``INVALID_ARGUMENT`` is returned. + + Filtering in Vertex AI Search is done by mapping the LHS + filter key to a key property defined in the Vertex AI Search + backend -- this mapping is defined by the customer in their + schema. For example a media customer might have a field + 'name' in their schema. In this case the filter would look + like this: filter --> name:'ANY("king kong")' + + For more information about filtering including syntax and + filter operators, see + `Filter `__ + canonical_filter (str): + The default filter that is applied when a user performs a + search without checking any filters on the search page. + + The filter applied to every search request when quality + improvement such as query expansion is needed. In the case a + query does not have a sufficient amount of results this + filter will be used to determine whether or not to enable + the query expansion flow. The original filter will still be + used for the query expanded search. This field is strongly + recommended to achieve high search quality. + + For more information about filter syntax, see + [SearchRequest.filter][google.cloud.discoveryengine.v1beta.SearchRequest.filter]. order_by (str): The order in which documents are returned. Documents can be ordered by a field in an [Document][google.cloud.discoveryengine.v1beta.Document] object. Leave it unset if ordered by relevance. ``order_by`` - expression is case-sensitive. + expression is case-sensitive. 
For more information on + ordering, see + `Ordering `__ If this field is unrecognizable, an ``INVALID_ARGUMENT`` is returned. @@ -112,8 +141,9 @@ class SearchRequest(proto.Message): A maximum of 100 values are allowed. Otherwise, an ``INVALID_ARGUMENT`` error is returned. boost_spec (google.cloud.discoveryengine_v1beta.types.SearchRequest.BoostSpec): - Boost specification to boost certain - documents. + Boost specification to boost certain documents. For more + information on boosting, see + `Boosting `__ params (MutableMapping[str, google.protobuf.struct_pb2.Value]): Additional search parameters. @@ -121,11 +151,15 @@ class SearchRequest(proto.Message): - ``user_country_code``: string. Default empty. If set to non-empty, results are restricted or boosted based on the - location provided. + location provided. Example: user_country_code: "au" + + For available codes see `Country + Codes `__ + - ``search_type``: double. Default empty. Enables non-webpage searching depending on the value. The only valid non-default value is 1, which enables image - searching. + searching. Example: search_type: 1 query_expansion_spec (google.cloud.discoveryengine_v1beta.types.SearchRequest.QueryExpansionSpec): The query expansion specification that specifies the conditions under which query @@ -159,20 +193,20 @@ class SearchRequest(proto.Message): Uses the provided embedding to do additional semantic document retrieval. The retrieval is based on the dot product of - [SearchRequest.embedding_spec.embedding_vectors.vector][] + [SearchRequest.EmbeddingSpec.EmbeddingVector.vector][google.cloud.discoveryengine.v1beta.SearchRequest.EmbeddingSpec.EmbeddingVector.vector] and the document embedding that is provided in - [SearchRequest.embedding_spec.embedding_vectors.field_path][]. + [SearchRequest.EmbeddingSpec.EmbeddingVector.field_path][google.cloud.discoveryengine.v1beta.SearchRequest.EmbeddingSpec.EmbeddingVector.field_path]. 
If - [SearchRequest.embedding_spec.embedding_vectors.field_path][] + [SearchRequest.EmbeddingSpec.EmbeddingVector.field_path][google.cloud.discoveryengine.v1beta.SearchRequest.EmbeddingSpec.EmbeddingVector.field_path] is not provided, it will use - [ServingConfig.embedding_config.field_paths][]. + [ServingConfig.EmbeddingConfig.field_path][]. ranking_expression (str): The ranking expression controls the customized ranking on retrieval documents. This overrides - [ServingConfig.ranking_expression][]. The ranking expression - is a single function or multiple functions that are joint by - "+". + [ServingConfig.ranking_expression][google.cloud.discoveryengine.v1beta.ServingConfig.ranking_expression]. + The ranking expression is a single function or multiple + functions that are joint by "+". - ranking_expression = function, { " + ", function }; Supported functions: @@ -686,11 +720,55 @@ class SummarySpec(proto.Message): navigational queries. If this field is set to ``true``, we skip generating summaries for non-summary seeking queries and return fallback messages instead. + model_prompt_spec (google.cloud.discoveryengine_v1beta.types.SearchRequest.ContentSearchSpec.SummarySpec.ModelPromptSpec): + If specified, the spec will be used to modify + the prompt provided to the LLM. language_code (str): Language code for Summary. Use language tags defined by - [BCP47][https://www.rfc-editor.org/rfc/bcp/bcp47.txt]. + `BCP47 `__. + Note: This is an experimental feature. + model_spec (google.cloud.discoveryengine_v1beta.types.SearchRequest.ContentSearchSpec.SummarySpec.ModelSpec): + If specified, the spec will be used to modify + the model specification provided to the LLM. """ + class ModelPromptSpec(proto.Message): + r"""Specification of the prompt to use with the model. + + Attributes: + preamble (str): + Text at the beginning of the prompt that + instructs the assistant. Examples are available + in the user guide. 
+ """ + + preamble: str = proto.Field( + proto.STRING, + number=1, + ) + + class ModelSpec(proto.Message): + r"""Specification of the model. + + Attributes: + version (str): + The model version used to generate the summary. + + Supported values are: + + - ``stable``: string. Default value when no value is + specified. Uses a generally available, fine-tuned version + of the text-bison@001 model. + - ``preview``: string. (Public preview) Uses a fine-tuned + version of the text-bison@002 model. This model works + only for summaries in English. + """ + + version: str = proto.Field( + proto.STRING, + number=1, + ) + summary_result_count: int = proto.Field( proto.INT32, number=1, @@ -707,10 +785,22 @@ class SummarySpec(proto.Message): proto.BOOL, number=4, ) + model_prompt_spec: "SearchRequest.ContentSearchSpec.SummarySpec.ModelPromptSpec" = proto.Field( + proto.MESSAGE, + number=5, + message="SearchRequest.ContentSearchSpec.SummarySpec.ModelPromptSpec", + ) language_code: str = proto.Field( proto.STRING, number=6, ) + model_spec: "SearchRequest.ContentSearchSpec.SummarySpec.ModelSpec" = ( + proto.Field( + proto.MESSAGE, + number=7, + message="SearchRequest.ContentSearchSpec.SummarySpec.ModelSpec", + ) + ) class ExtractiveContentSpec(proto.Message): r"""A specification for configuring the extractive content in a @@ -729,7 +819,7 @@ class ExtractiveContentSpec(proto.Message): ``max_extractive_answer_count``, return all of the answers. Otherwise, return the ``max_extractive_answer_count``. - At most one answer is returned for each + At most five answers are returned for each [SearchResult][google.cloud.discoveryengine.v1beta.SearchResponse.SearchResult]. max_extractive_segment_count (int): The max number of extractive segments returned in each @@ -756,6 +846,10 @@ class ExtractiveContentSpec(proto.Message): Specifies whether to return the confidence score from the extractive segments in each search result. The default value is ``false``. 
+ + Note: this is a private preview feature and only works for + allowlisted users, please reach out to Cloud Support team if + you want to use it. num_previous_segments (int): Specifies whether to also include the adjacent from each selected segments. Return at most ``num_previous_segments`` @@ -872,6 +966,10 @@ class EmbeddingVector(proto.Message): proto.STRING, number=7, ) + canonical_filter: str = proto.Field( + proto.STRING, + number=29, + ) order_by: str = proto.Field( proto.STRING, number=8, @@ -1153,6 +1251,8 @@ class Summary(proto.Message): safety_attributes (google.cloud.discoveryengine_v1beta.types.SearchResponse.Summary.SafetyAttributes): A collection of Safety Attribute categories and their associated confidence scores. + summary_with_metadata (google.cloud.discoveryengine_v1beta.types.SearchResponse.Summary.SummaryWithMetadata): + Summary with metadata information. """ class SummarySkippedReason(proto.Enum): @@ -1225,6 +1325,125 @@ class SafetyAttributes(proto.Message): number=2, ) + class CitationMetadata(proto.Message): + r"""Citation metadata. + + Attributes: + citations (MutableSequence[google.cloud.discoveryengine_v1beta.types.SearchResponse.Summary.Citation]): + Citations for segments. + """ + + citations: MutableSequence[ + "SearchResponse.Summary.Citation" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="SearchResponse.Summary.Citation", + ) + + class Citation(proto.Message): + r"""Citation info for a segment. + + Attributes: + start_index (int): + Index indicates the start of the segment, + measured in bytes/unicode. + end_index (int): + End of the attributed segment, exclusive. + sources (MutableSequence[google.cloud.discoveryengine_v1beta.types.SearchResponse.Summary.CitationSource]): + Citation sources for the attributed segment. 
+ """ + + start_index: int = proto.Field( + proto.INT64, + number=1, + ) + end_index: int = proto.Field( + proto.INT64, + number=2, + ) + sources: MutableSequence[ + "SearchResponse.Summary.CitationSource" + ] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="SearchResponse.Summary.CitationSource", + ) + + class CitationSource(proto.Message): + r"""Citation source. + + Attributes: + reference_index (int): + Document reference index from + SummaryWithMetadata.references. It is 0-indexed and the + value will be zero if the reference_index is not set + explicitly. + """ + + reference_index: int = proto.Field( + proto.INT64, + number=4, + ) + + class Reference(proto.Message): + r"""Document reference. + + Attributes: + title (str): + Title of the document. + document (str): + Required. + [Document.name][google.cloud.discoveryengine.v1beta.Document.name] + of the document. Full resource name of the referenced + document, in the format + ``projects/*/locations/*/collections/*/dataStores/*/branches/*/documents/*``. + uri (str): + Cloud Storage or HTTP uri for the document. + """ + + title: str = proto.Field( + proto.STRING, + number=1, + ) + document: str = proto.Field( + proto.STRING, + number=2, + ) + uri: str = proto.Field( + proto.STRING, + number=3, + ) + + class SummaryWithMetadata(proto.Message): + r"""Summary with metadata information. + + Attributes: + summary (str): + Summary text with no citation information. + citation_metadata (google.cloud.discoveryengine_v1beta.types.SearchResponse.Summary.CitationMetadata): + Citation metadata for given summary. + references (MutableSequence[google.cloud.discoveryengine_v1beta.types.SearchResponse.Summary.Reference]): + Document References. 
+ """ + + summary: str = proto.Field( + proto.STRING, + number=1, + ) + citation_metadata: "SearchResponse.Summary.CitationMetadata" = proto.Field( + proto.MESSAGE, + number=2, + message="SearchResponse.Summary.CitationMetadata", + ) + references: MutableSequence[ + "SearchResponse.Summary.Reference" + ] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="SearchResponse.Summary.Reference", + ) + summary_text: str = proto.Field( proto.STRING, number=1, @@ -1241,6 +1460,13 @@ class SafetyAttributes(proto.Message): number=3, message="SearchResponse.Summary.SafetyAttributes", ) + summary_with_metadata: "SearchResponse.Summary.SummaryWithMetadata" = ( + proto.Field( + proto.MESSAGE, + number=4, + message="SearchResponse.Summary.SummaryWithMetadata", + ) + ) class QueryExpansionInfo(proto.Message): r"""Information describing query expansion including whether diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/serving_config.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/serving_config.py new file mode 100644 index 000000000000..ae232303eb4b --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/serving_config.py @@ -0,0 +1,389 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.discoveryengine_v1beta.types import common, search_service + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1beta", + manifest={ + "ServingConfig", + }, +) + + +class ServingConfig(proto.Message): + r"""Configures metadata that is used to generate serving time + results (e.g. search results or recommendation predictions). The + ServingConfig is passed in the search and predict request and + generates results. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + media_config (google.cloud.discoveryengine_v1beta.types.ServingConfig.MediaConfig): + The MediaConfig of the serving configuration. + + This field is a member of `oneof`_ ``vertical_config``. + generic_config (google.cloud.discoveryengine_v1beta.types.ServingConfig.GenericConfig): + The GenericConfig of the serving + configuration. + + This field is a member of `oneof`_ ``vertical_config``. + name (str): + Immutable. Fully qualified name + ``projects/{project}/locations/{location}/collections/{collection_id}/dataStores/{data_store_id}/servingConfigs/{serving_config_id}`` + display_name (str): + Required. The human readable serving config display name. + Used in Discovery UI. + + This field must be a UTF-8 encoded string with a length + limit of 128 characters. Otherwise, an INVALID_ARGUMENT + error is returned. + solution_type (google.cloud.discoveryengine_v1beta.types.SolutionType): + Required. Immutable. Specifies the solution + type that a serving config can be associated + with. 
+ model_id (str): + The id of the model to use at serving time. Currently only + RecommendationModels are supported. Can be changed but only + to a compatible model (e.g. others-you-may-like CTR to + others-you-may-like CVR). + + Required when + [SolutionType][google.cloud.discoveryengine.v1beta.SolutionType] + is + [SOLUTION_TYPE_RECOMMENDATION][google.cloud.discoveryengine.v1beta.SolutionType.SOLUTION_TYPE_RECOMMENDATION]. + diversity_level (str): + How much diversity to use in recommendation model results + e.g. ``medium-diversity`` or ``high-diversity``. Currently + supported values: + + - ``no-diversity`` + - ``low-diversity`` + - ``medium-diversity`` + - ``high-diversity`` + - ``auto-diversity`` + + If not specified, we choose default based on recommendation + model type. Default value: ``no-diversity``. + + Can only be set if + [SolutionType][google.cloud.discoveryengine.v1beta.SolutionType] + is + [SOLUTION_TYPE_RECOMMENDATION][google.cloud.discoveryengine.v1beta.SolutionType.SOLUTION_TYPE_RECOMMENDATION]. + embedding_config (google.cloud.discoveryengine_v1beta.types.EmbeddingConfig): + Bring your own embedding config. The config is used for + search semantic retrieval. The retrieval is based on the dot + product of + [SearchRequest.EmbeddingSpec.EmbeddingVector.vector][google.cloud.discoveryengine.v1beta.SearchRequest.EmbeddingSpec.EmbeddingVector.vector] + and the document embeddings that are provided by this + EmbeddingConfig. If + [SearchRequest.EmbeddingSpec.EmbeddingVector.vector][google.cloud.discoveryengine.v1beta.SearchRequest.EmbeddingSpec.EmbeddingVector.vector] + is provided, it overrides this + [ServingConfig.embedding_config][google.cloud.discoveryengine.v1beta.ServingConfig.embedding_config]. + ranking_expression (str): + The ranking expression controls the customized ranking on + retrieval documents. To leverage this, document embedding is + required. 
The ranking expression setting in ServingConfig + applies to all search requests served by the serving config. + However, if [SearchRequest.ranking_expression][] is + specified, it overrides the ServingConfig ranking + expression. + + The ranking expression is a single function or multiple + functions that are joined by "+". + + - ranking_expression = function, { " + ", function }; + Supported functions: + - double \* relevance_score + - double \* dotProduct(embedding_field_path) Function + variables: relevance_score: pre-defined keywords, used + for measure relevance between query and document. + embedding_field_path: the document embedding field used + with query embedding vector. dotProduct: embedding + function between embedding_field_path and query embedding + vector. + + Example ranking expression: If document has an embedding + field doc_embedding, the ranking expression could be 0.5 \* + relevance_score + 0.3 \* dotProduct(doc_embedding). + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. ServingConfig created timestamp. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. ServingConfig updated timestamp. + filter_control_ids (MutableSequence[str]): + Filter controls to use in serving path. + All triggered filter controls will be applied. + Filter controls must be in the same data store + as the serving config. Maximum of 20 filter + controls. + boost_control_ids (MutableSequence[str]): + Boost controls to use in serving path. + All triggered boost controls will be applied. + Boost controls must be in the same data store as + the serving config. Maximum of 20 boost + controls. + redirect_control_ids (MutableSequence[str]): + IDs of the redirect controls. Only the first triggered + redirect action is applied, even if multiple apply. Maximum + number of specifications is 100. 
+ + Can only be set if + [SolutionType][google.cloud.discoveryengine.v1beta.SolutionType] + is + [SOLUTION_TYPE_SEARCH][google.cloud.discoveryengine.v1beta.SolutionType.SOLUTION_TYPE_SEARCH]. + synonyms_control_ids (MutableSequence[str]): + Condition synonyms specifications. If multiple synonyms + conditions match, all matching synonyms controls in the list + will execute. Maximum number of specifications is 100. + + Can only be set if + [SolutionType][google.cloud.discoveryengine.v1beta.SolutionType] + is + [SOLUTION_TYPE_SEARCH][google.cloud.discoveryengine.v1beta.SolutionType.SOLUTION_TYPE_SEARCH]. + oneway_synonyms_control_ids (MutableSequence[str]): + Condition oneway synonyms specifications. If multiple oneway + synonyms conditions match, all matching oneway synonyms + controls in the list will execute. Maximum number of + specifications is 100. + + Can only be set if + [SolutionType][google.cloud.discoveryengine.v1beta.SolutionType] + is + [SOLUTION_TYPE_SEARCH][google.cloud.discoveryengine.v1beta.SolutionType.SOLUTION_TYPE_SEARCH]. + dissociate_control_ids (MutableSequence[str]): + Condition do not associate specifications. If multiple do + not associate conditions match, all matching do not + associate controls in the list will execute. Order does not + matter. Maximum number of specifications is 100. + + Can only be set if + [SolutionType][google.cloud.discoveryengine.v1beta.SolutionType] + is + [SOLUTION_TYPE_SEARCH][google.cloud.discoveryengine.v1beta.SolutionType.SOLUTION_TYPE_SEARCH]. + replacement_control_ids (MutableSequence[str]): + Condition replacement specifications. Applied according to + the order in the list. A previously replaced term can not be + re-replaced. Maximum number of specifications is 100. + + Can only be set if + [SolutionType][google.cloud.discoveryengine.v1beta.SolutionType] + is + [SOLUTION_TYPE_SEARCH][google.cloud.discoveryengine.v1beta.SolutionType.SOLUTION_TYPE_SEARCH]. 
+ ignore_control_ids (MutableSequence[str]): + Condition ignore specifications. If multiple + ignore conditions match, all matching ignore + controls in the list will execute. + Order does not matter. + Maximum number of specifications is 100. + """ + + class MediaConfig(proto.Message): + r"""Specifies the configurations needed for Media Discovery. Currently + we support: + + - ``demote_content_watched``: Threshold for watched content + demotion. Customers can specify if using watched content demotion + or use viewed detail page. Using the content watched demotion, + customers need to specify the watched minutes or percentage + exceeds the threshold, the content will be demoted in the + recommendation result. + - ``promote_fresh_content``: cutoff days for fresh content + promotion. Customers can specify if using content freshness + promotion. If the content was published within the cutoff days, + the content will be promoted in the recommendation result. Can + only be set if + [SolutionType][google.cloud.discoveryengine.v1beta.SolutionType] + is + [SOLUTION_TYPE_RECOMMENDATION][google.cloud.discoveryengine.v1beta.SolutionType.SOLUTION_TYPE_RECOMMENDATION]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + content_watched_percentage_threshold (float): + Specifies the content watched percentage threshold for + demotion. Threshold value must be between [0, 1.0] + inclusive. + + This field is a member of `oneof`_ ``demote_content_watched``. + content_watched_seconds_threshold (float): + Specifies the content watched minutes + threshold for demotion. + + This field is a member of `oneof`_ ``demote_content_watched``. 
+ demotion_event_type (str): + Specifies the event type used for demoting recommendation + result. Currently supported values: + + - ``view-item``: Item viewed. + - ``media-play``: Start/resume watching a video, playing a + song, etc. + - ``media-complete``: Finished or stopped midway through a + video, song, etc. + + If unset, watch history demotion will not be applied. + Content freshness demotion will still be applied. + content_freshness_cutoff_days (int): + Specifies the content freshness used for + recommendation result. Contents will be demoted + if contents were published for more than content + freshness cutoff days. + """ + + content_watched_percentage_threshold: float = proto.Field( + proto.FLOAT, + number=2, + oneof="demote_content_watched", + ) + content_watched_seconds_threshold: float = proto.Field( + proto.FLOAT, + number=5, + oneof="demote_content_watched", + ) + demotion_event_type: str = proto.Field( + proto.STRING, + number=1, + ) + content_freshness_cutoff_days: int = proto.Field( + proto.INT32, + number=4, + ) + + class GenericConfig(proto.Message): + r"""Specifies the configurations needed for Generic Discovery.Currently + we support: + + - ``content_search_spec``: configuration for generic content + search. + + Attributes: + content_search_spec (google.cloud.discoveryengine_v1beta.types.SearchRequest.ContentSearchSpec): + Specifies the expected behavior of content + search. Only valid for content-search enabled + data store. 
+ """ + + content_search_spec: search_service.SearchRequest.ContentSearchSpec = ( + proto.Field( + proto.MESSAGE, + number=1, + message=search_service.SearchRequest.ContentSearchSpec, + ) + ) + + media_config: MediaConfig = proto.Field( + proto.MESSAGE, + number=7, + oneof="vertical_config", + message=MediaConfig, + ) + generic_config: GenericConfig = proto.Field( + proto.MESSAGE, + number=10, + oneof="vertical_config", + message=GenericConfig, + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + solution_type: common.SolutionType = proto.Field( + proto.ENUM, + number=3, + enum=common.SolutionType, + ) + model_id: str = proto.Field( + proto.STRING, + number=4, + ) + diversity_level: str = proto.Field( + proto.STRING, + number=5, + ) + embedding_config: common.EmbeddingConfig = proto.Field( + proto.MESSAGE, + number=20, + message=common.EmbeddingConfig, + ) + ranking_expression: str = proto.Field( + proto.STRING, + number=21, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + filter_control_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=11, + ) + boost_control_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=12, + ) + redirect_control_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=14, + ) + synonyms_control_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=15, + ) + oneway_synonyms_control_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=16, + ) + dissociate_control_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=17, + ) + replacement_control_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=18, + ) + 
ignore_control_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=19, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/serving_config_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/serving_config_service.py new file mode 100644 index 000000000000..ed5842e20d77 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/serving_config_service.py @@ -0,0 +1,142 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.discoveryengine_v1beta.types import ( + serving_config as gcd_serving_config, +) + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1beta", + manifest={ + "UpdateServingConfigRequest", + "GetServingConfigRequest", + "ListServingConfigsRequest", + "ListServingConfigsResponse", + }, +) + + +class UpdateServingConfigRequest(proto.Message): + r"""Request for UpdateServingConfig method. + + Attributes: + serving_config (google.cloud.discoveryengine_v1beta.types.ServingConfig): + Required. The ServingConfig to update. 
+ update_mask (google.protobuf.field_mask_pb2.FieldMask): + Indicates which fields in the provided + [ServingConfig][google.cloud.discoveryengine.v1beta.ServingConfig] + to update. The following are NOT supported: + + - [ServingConfig.name][google.cloud.discoveryengine.v1beta.ServingConfig.name] + + If not set, all supported fields are updated. + """ + + serving_config: gcd_serving_config.ServingConfig = proto.Field( + proto.MESSAGE, + number=1, + message=gcd_serving_config.ServingConfig, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class GetServingConfigRequest(proto.Message): + r"""Request for GetServingConfig method. + + Attributes: + name (str): + Required. The resource name of the ServingConfig to get. + Format: + ``projects/{project_number}/locations/{location}/collections/{collection}/dataStores/{data_store}/servingConfigs/{serving_config_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListServingConfigsRequest(proto.Message): + r"""Request for ListServingConfigs method. + + Attributes: + parent (str): + Required. The dataStore resource name. Format: + ``projects/{project_number}/locations/{location}/collections/{collection}/dataStores/{data_store}`` + page_size (int): + Optional. Maximum number of results to + return. If unspecified, defaults to 100. If a + value greater than 100 is provided, at most 100 + results are returned. + page_token (str): + Optional. A page token, received from a previous + ``ListServingConfigs`` call. Provide this to retrieve the + subsequent page. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListServingConfigsResponse(proto.Message): + r"""Response for ListServingConfigs method. 
+ + Attributes: + serving_configs (MutableSequence[google.cloud.discoveryengine_v1beta.types.ServingConfig]): + All the ServingConfigs for a given dataStore. + next_page_token (str): + Pagination token, if not returned indicates + the last page. + """ + + @property + def raw_page(self): + return self + + serving_configs: MutableSequence[ + gcd_serving_config.ServingConfig + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gcd_serving_config.ServingConfig, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/site_search_engine.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/site_search_engine.py new file mode 100644 index 000000000000..df211ca7863b --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/site_search_engine.py @@ -0,0 +1,257 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1beta", + manifest={ + "SiteSearchEngine", + "TargetSite", + "SiteVerificationInfo", + }, +) + + +class SiteSearchEngine(proto.Message): + r"""SiteSearchEngine captures DataStore level site search + persisting configurations. It is a singleton value per data + store. + + Attributes: + name (str): + The fully qualified resource name of the site search engine. + Format: + ``projects/*/locations/*/dataStores/*/siteSearchEngine`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class TargetSite(proto.Message): + r"""A target site for the SiteSearchEngine. + + Attributes: + name (str): + Output only. The fully qualified resource name of the target + site. + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine/targetSites/{target_site}`` + The ``target_site_id`` is system-generated. + provided_uri_pattern (str): + Required. Input only. The user provided URI pattern from + which the ``generated_uri_pattern`` is generated. + type_ (google.cloud.discoveryengine_v1beta.types.TargetSite.Type): + The type of the target site, e.g., whether + the site is to be included or excluded. + exact_match (bool): + Input only. If set to false, a uri_pattern is generated to + include all pages whose address contains the + provided_uri_pattern. If set to true, an uri_pattern is + generated to try to be an exact match of the + provided_uri_pattern or just the specific page if the + provided_uri_pattern is a specific one. provided_uri_pattern + is always normalized to generate the URI pattern to be used + by the search engine. + generated_uri_pattern (str): + Output only. This is system-generated based on the + provided_uri_pattern. 
+ site_verification_info (google.cloud.discoveryengine_v1beta.types.SiteVerificationInfo): + Output only. Site ownership and validity + verification status. + indexing_status (google.cloud.discoveryengine_v1beta.types.TargetSite.IndexingStatus): + Output only. Indexing status. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The target site's last updated + time. + failure_reason (google.cloud.discoveryengine_v1beta.types.TargetSite.FailureReason): + Output only. Failure reason. + """ + + class Type(proto.Enum): + r"""Possible target site types. + + Values: + TYPE_UNSPECIFIED (0): + This value is unused. In this case, server behavior defaults + to + [Type.INCLUDE][google.cloud.discoveryengine.v1beta.TargetSite.Type.INCLUDE]. + INCLUDE (1): + Include the target site. + EXCLUDE (2): + Exclude the target site. + """ + TYPE_UNSPECIFIED = 0 + INCLUDE = 1 + EXCLUDE = 2 + + class IndexingStatus(proto.Enum): + r"""Target site indexing status enumeration. + + Values: + INDEXING_STATUS_UNSPECIFIED (0): + Defaults to SUCCEEDED. + PENDING (1): + The target site is in the update queue and + will be picked up by indexing pipeline. + FAILED (2): + The target site fails to be indexed. + SUCCEEDED (3): + The target site has been indexed. + DELETING (4): + The previously indexed target site has been + marked to be deleted. This is a transitioning + state which will resulted in either: + + 1. target site deleted if unindexing is + successful; + 2. state reverts to SUCCEEDED if the unindexing + fails. + """ + INDEXING_STATUS_UNSPECIFIED = 0 + PENDING = 1 + FAILED = 2 + SUCCEEDED = 3 + DELETING = 4 + + class FailureReason(proto.Message): + r"""Site search indexing failure reasons. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + quota_failure (google.cloud.discoveryengine_v1beta.types.TargetSite.FailureReason.QuotaFailure): + Failed due to insufficient quota. 
+ + This field is a member of `oneof`_ ``failure``. + """ + + class QuotaFailure(proto.Message): + r"""Failed due to insufficient quota. + + Attributes: + total_required_quota (int): + This number is an estimation on how much + total quota this project needs to successfully + complete indexing. + """ + + total_required_quota: int = proto.Field( + proto.INT64, + number=1, + ) + + quota_failure: "TargetSite.FailureReason.QuotaFailure" = proto.Field( + proto.MESSAGE, + number=1, + oneof="failure", + message="TargetSite.FailureReason.QuotaFailure", + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + provided_uri_pattern: str = proto.Field( + proto.STRING, + number=2, + ) + type_: Type = proto.Field( + proto.ENUM, + number=3, + enum=Type, + ) + exact_match: bool = proto.Field( + proto.BOOL, + number=6, + ) + generated_uri_pattern: str = proto.Field( + proto.STRING, + number=4, + ) + site_verification_info: "SiteVerificationInfo" = proto.Field( + proto.MESSAGE, + number=7, + message="SiteVerificationInfo", + ) + indexing_status: IndexingStatus = proto.Field( + proto.ENUM, + number=8, + enum=IndexingStatus, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + failure_reason: FailureReason = proto.Field( + proto.MESSAGE, + number=9, + message=FailureReason, + ) + + +class SiteVerificationInfo(proto.Message): + r"""Verification information for target sites in advanced site + search. + + Attributes: + site_verification_state (google.cloud.discoveryengine_v1beta.types.SiteVerificationInfo.SiteVerificationState): + Site verification state indicating the + ownership and validity. + verify_time (google.protobuf.timestamp_pb2.Timestamp): + Latest site verification time. + """ + + class SiteVerificationState(proto.Enum): + r"""Site verification state. + + Values: + SITE_VERIFICATION_STATE_UNSPECIFIED (0): + Defaults to VERIFIED. + VERIFIED (1): + Site ownership verified. 
+ UNVERIFIED (2): + Site ownership pending verification or + verification failed. + EXEMPTED (3): + Site exempt from verification, e.g., a public + website that opens to all. + """ + SITE_VERIFICATION_STATE_UNSPECIFIED = 0 + VERIFIED = 1 + UNVERIFIED = 2 + EXEMPTED = 3 + + site_verification_state: SiteVerificationState = proto.Field( + proto.ENUM, + number=1, + enum=SiteVerificationState, + ) + verify_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/site_search_engine_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/site_search_engine_service.py new file mode 100644 index 000000000000..a10bd0207741 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/site_search_engine_service.py @@ -0,0 +1,851 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.discoveryengine_v1beta.types import ( + site_search_engine as gcd_site_search_engine, +) + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1beta", + manifest={ + "GetSiteSearchEngineRequest", + "CreateTargetSiteRequest", + "CreateTargetSiteMetadata", + "BatchCreateTargetSitesRequest", + "GetTargetSiteRequest", + "UpdateTargetSiteRequest", + "UpdateTargetSiteMetadata", + "DeleteTargetSiteRequest", + "DeleteTargetSiteMetadata", + "ListTargetSitesRequest", + "ListTargetSitesResponse", + "BatchCreateTargetSiteMetadata", + "BatchCreateTargetSitesResponse", + "EnableAdvancedSiteSearchRequest", + "EnableAdvancedSiteSearchResponse", + "EnableAdvancedSiteSearchMetadata", + "DisableAdvancedSiteSearchRequest", + "DisableAdvancedSiteSearchResponse", + "DisableAdvancedSiteSearchMetadata", + "RecrawlUrisRequest", + "RecrawlUrisResponse", + "RecrawlUrisMetadata", + "BatchVerifyTargetSitesRequest", + "BatchVerifyTargetSitesResponse", + "BatchVerifyTargetSitesMetadata", + "FetchDomainVerificationStatusRequest", + "FetchDomainVerificationStatusResponse", + }, +) + + +class GetSiteSearchEngineRequest(proto.Message): + r"""Request message for + [SiteSearchEngineService.GetSiteSearchEngine][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.GetSiteSearchEngine] + method. + + Attributes: + name (str): + Required. Resource name of + [SiteSearchEngine][google.cloud.discoveryengine.v1beta.SiteSearchEngine], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine``. + + If the caller does not have permission to access the + [SiteSearchEngine], regardless of whether or not it exists, + a PERMISSION_DENIED error is returned. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateTargetSiteRequest(proto.Message): + r"""Request message for + [SiteSearchEngineService.CreateTargetSite][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.CreateTargetSite] + method. + + Attributes: + parent (str): + Required. Parent resource name of + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine``. + target_site (google.cloud.discoveryengine_v1beta.types.TargetSite): + Required. The + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite] + to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + target_site: gcd_site_search_engine.TargetSite = proto.Field( + proto.MESSAGE, + number=2, + message=gcd_site_search_engine.TargetSite, + ) + + +class CreateTargetSiteMetadata(proto.Message): + r"""Metadata related to the progress of the + [SiteSearchEngineService.CreateTargetSite][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.CreateTargetSite] + operation. This will be returned by the + google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class BatchCreateTargetSitesRequest(proto.Message): + r"""Request message for + [SiteSearchEngineService.BatchCreateTargetSites][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.BatchCreateTargetSites] + method. + + Attributes: + parent (str): + Required. 
The parent resource shared by all TargetSites + being created. + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine``. + The parent field in the CreateBookRequest messages must + either be empty or match this field. + requests (MutableSequence[google.cloud.discoveryengine_v1beta.types.CreateTargetSiteRequest]): + Required. The request message specifying the + resources to create. A maximum of 20 TargetSites + can be created in a batch. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + requests: MutableSequence["CreateTargetSiteRequest"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="CreateTargetSiteRequest", + ) + + +class GetTargetSiteRequest(proto.Message): + r"""Request message for + [SiteSearchEngineService.GetTargetSite][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.GetTargetSite] + method. + + Attributes: + name (str): + Required. Full resource name of + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine/targetSites/{target_site}``. + + If the caller does not have permission to access the + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite], + regardless of whether or not it exists, a PERMISSION_DENIED + error is returned. + + If the requested + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite] + does not exist, a NOT_FOUND error is returned. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateTargetSiteRequest(proto.Message): + r"""Request message for + [SiteSearchEngineService.UpdateTargetSite][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.UpdateTargetSite] + method. + + Attributes: + target_site (google.cloud.discoveryengine_v1beta.types.TargetSite): + Required. The target site to update. 
If the caller does not + have permission to update the + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite], + regardless of whether or not it exists, a PERMISSION_DENIED + error is returned. + + If the + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite] + to update does not exist, a NOT_FOUND error is returned. + """ + + target_site: gcd_site_search_engine.TargetSite = proto.Field( + proto.MESSAGE, + number=1, + message=gcd_site_search_engine.TargetSite, + ) + + +class UpdateTargetSiteMetadata(proto.Message): + r"""Metadata related to the progress of the + [SiteSearchEngineService.UpdateTargetSite][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.UpdateTargetSite] + operation. This will be returned by the + google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class DeleteTargetSiteRequest(proto.Message): + r"""Request message for + [SiteSearchEngineService.DeleteTargetSite][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.DeleteTargetSite] + method. + + Attributes: + name (str): + Required. Full resource name of + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite], + such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine/targetSites/{target_site}``. + + If the caller does not have permission to access the + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite], + regardless of whether or not it exists, a PERMISSION_DENIED + error is returned. 
+ + If the requested + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite] + does not exist, a NOT_FOUND error is returned. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteTargetSiteMetadata(proto.Message): + r"""Metadata related to the progress of the + [SiteSearchEngineService.DeleteTargetSite][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.DeleteTargetSite] + operation. This will be returned by the + google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class ListTargetSitesRequest(proto.Message): + r"""Request message for + [SiteSearchEngineService.ListTargetSites][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.ListTargetSites] + method. + + Attributes: + parent (str): + Required. The parent site search engine resource name, such + as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine``. + + If the caller does not have permission to list + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite]s + under this site search engine, regardless of whether or not + this branch exists, a PERMISSION_DENIED error is returned. + page_size (int): + Requested page size. Server may return fewer items than + requested. If unspecified, server will pick an appropriate + default. The maximum value is 1000; values above 1000 will + be coerced to 1000. + + If this field is negative, an INVALID_ARGUMENT error is + returned. 
+ page_token (str): + A page token, received from a previous ``ListTargetSites`` + call. Provide this to retrieve the subsequent page. + + When paginating, all other parameters provided to + ``ListTargetSites`` must match the call that provided the + page token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListTargetSitesResponse(proto.Message): + r"""Response message for + [SiteSearchEngineService.ListTargetSites][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.ListTargetSites] + method. + + Attributes: + target_sites (MutableSequence[google.cloud.discoveryengine_v1beta.types.TargetSite]): + List of TargetSites. + next_page_token (str): + A token that can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + total_size (int): + The total number of items matching the + request. This will always be populated in the + response. + """ + + @property + def raw_page(self): + return self + + target_sites: MutableSequence[ + gcd_site_search_engine.TargetSite + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gcd_site_search_engine.TargetSite, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + total_size: int = proto.Field( + proto.INT32, + number=3, + ) + + +class BatchCreateTargetSiteMetadata(proto.Message): + r"""Metadata related to the progress of the + [SiteSearchEngineService.BatchCreateTargetSites][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.BatchCreateTargetSites] + operation. This will be returned by the + google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. 
If the operation + is done, this is also the finish time. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class BatchCreateTargetSitesResponse(proto.Message): + r"""Response message for + [SiteSearchEngineService.BatchCreateTargetSites][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.BatchCreateTargetSites] + method. + + Attributes: + target_sites (MutableSequence[google.cloud.discoveryengine_v1beta.types.TargetSite]): + TargetSites created. + """ + + target_sites: MutableSequence[ + gcd_site_search_engine.TargetSite + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gcd_site_search_engine.TargetSite, + ) + + +class EnableAdvancedSiteSearchRequest(proto.Message): + r"""Request message for + [SiteSearchEngineService.EnableAdvancedSiteSearch][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.EnableAdvancedSiteSearch] + method. + + Attributes: + site_search_engine (str): + Required. Full resource name of the + [SiteSearchEngine][google.cloud.discoveryengine.v1beta.SiteSearchEngine], + such as + ``projects/{project}/locations/{location}/dataStores/{data_store_id}/siteSearchEngine``. + """ + + site_search_engine: str = proto.Field( + proto.STRING, + number=1, + ) + + +class EnableAdvancedSiteSearchResponse(proto.Message): + r"""Response message for + [SiteSearchEngineService.EnableAdvancedSiteSearch][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.EnableAdvancedSiteSearch] + method. + + """ + + +class EnableAdvancedSiteSearchMetadata(proto.Message): + r"""Metadata related to the progress of the + [SiteSearchEngineService.EnableAdvancedSiteSearch][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.EnableAdvancedSiteSearch] + operation. 
This will be returned by the + google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class DisableAdvancedSiteSearchRequest(proto.Message): + r"""Request message for + [SiteSearchEngineService.DisableAdvancedSiteSearch][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.DisableAdvancedSiteSearch] + method. + + Attributes: + site_search_engine (str): + Required. Full resource name of the + [SiteSearchEngine][google.cloud.discoveryengine.v1beta.SiteSearchEngine], + such as + ``projects/{project}/locations/{location}/dataStores/{data_store_id}/siteSearchEngine``. + """ + + site_search_engine: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DisableAdvancedSiteSearchResponse(proto.Message): + r"""Response message for + [SiteSearchEngineService.DisableAdvancedSiteSearch][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.DisableAdvancedSiteSearch] + method. + + """ + + +class DisableAdvancedSiteSearchMetadata(proto.Message): + r"""Metadata related to the progress of the + [SiteSearchEngineService.DisableAdvancedSiteSearch][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.DisableAdvancedSiteSearch] + operation. This will be returned by the + google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. 
+ """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class RecrawlUrisRequest(proto.Message): + r"""Request message for + [SiteSearchEngineService.RecrawlUris][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.RecrawlUris] + method. + + Attributes: + site_search_engine (str): + Required. Full resource name of the + [SiteSearchEngine][google.cloud.discoveryengine.v1beta.SiteSearchEngine], + such as + ``projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine``. + uris (MutableSequence[str]): + Required. List of URIs to crawl. At most 10K URIs are + supported, otherwise an INVALID_ARGUMENT error is thrown. + Each URI should match at least one + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite] + in ``site_search_engine``. + """ + + site_search_engine: str = proto.Field( + proto.STRING, + number=1, + ) + uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +class RecrawlUrisResponse(proto.Message): + r"""Response message for + [SiteSearchEngineService.RecrawlUris][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.RecrawlUris] + method. + + Attributes: + failure_samples (MutableSequence[google.cloud.discoveryengine_v1beta.types.RecrawlUrisResponse.FailureInfo]): + Details for a sample of up to 10 ``failed_uris``. + failed_uris (MutableSequence[str]): + URIs that were not crawled before the LRO + terminated. + """ + + class FailureInfo(proto.Message): + r"""Details about why a particular URI failed to be crawled. Each + FailureInfo contains one FailureReason per CorpusType. + + Attributes: + uri (str): + URI that failed to be crawled. 
+ failure_reasons (MutableSequence[google.cloud.discoveryengine_v1beta.types.RecrawlUrisResponse.FailureInfo.FailureReason]): + List of failure reasons by corpus type (e.g. + desktop, mobile). + """ + + class FailureReason(proto.Message): + r"""Details about why crawling failed for a particular + CorpusType, e.g., DESKTOP and MOBILE crawling may fail for + different reasons. + + Attributes: + corpus_type (google.cloud.discoveryengine_v1beta.types.RecrawlUrisResponse.FailureInfo.FailureReason.CorpusType): + DESKTOP, MOBILE, or CORPUS_TYPE_UNSPECIFIED. + error_message (str): + Reason why the URI was not crawled. + """ + + class CorpusType(proto.Enum): + r"""CorpusType for the failed crawling operation. + + Values: + CORPUS_TYPE_UNSPECIFIED (0): + Default value. + DESKTOP (1): + Denotes a crawling attempt for the desktop + version of a page. + MOBILE (2): + Denotes a crawling attempt for the mobile + version of a page. + """ + CORPUS_TYPE_UNSPECIFIED = 0 + DESKTOP = 1 + MOBILE = 2 + + corpus_type: "RecrawlUrisResponse.FailureInfo.FailureReason.CorpusType" = ( + proto.Field( + proto.ENUM, + number=1, + enum="RecrawlUrisResponse.FailureInfo.FailureReason.CorpusType", + ) + ) + error_message: str = proto.Field( + proto.STRING, + number=2, + ) + + uri: str = proto.Field( + proto.STRING, + number=1, + ) + failure_reasons: MutableSequence[ + "RecrawlUrisResponse.FailureInfo.FailureReason" + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="RecrawlUrisResponse.FailureInfo.FailureReason", + ) + + failure_samples: MutableSequence[FailureInfo] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=FailureInfo, + ) + failed_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +class RecrawlUrisMetadata(proto.Message): + r"""Metadata related to the progress of the + [SiteSearchEngineService.RecrawlUris][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.RecrawlUris] + operation. 
This will be returned by the + google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. + invalid_uris (MutableSequence[str]): + Unique URIs in the request that don't match + any TargetSite in the DataStore, only match + TargetSites that haven't been fully indexed, or + match a TargetSite with type EXCLUDE. + valid_uris_count (int): + Total number of unique URIs in the request that are not in + invalid_uris. + success_count (int): + Total number of URIs that have been crawled + so far. + pending_count (int): + Total number of URIs that have yet to be + crawled. + quota_exceeded_count (int): + Total number of URIs that were rejected due + to insufficient indexing resources. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + invalid_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + valid_uris_count: int = proto.Field( + proto.INT32, + number=4, + ) + success_count: int = proto.Field( + proto.INT32, + number=5, + ) + pending_count: int = proto.Field( + proto.INT32, + number=6, + ) + quota_exceeded_count: int = proto.Field( + proto.INT32, + number=7, + ) + + +class BatchVerifyTargetSitesRequest(proto.Message): + r"""Request message for + [SiteSearchEngineService.BatchVerifyTargetSites][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.BatchVerifyTargetSites] + method. + + Attributes: + parent (str): + Required. The parent resource shared by all TargetSites + being verified. + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine``. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + + +class BatchVerifyTargetSitesResponse(proto.Message): + r"""Response message for + [SiteSearchEngineService.BatchVerifyTargetSites][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.BatchVerifyTargetSites] + method. + + """ + + +class BatchVerifyTargetSitesMetadata(proto.Message): + r"""Metadata related to the progress of the + [SiteSearchEngineService.BatchVerifyTargetSites][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.BatchVerifyTargetSites] + operation. This will be returned by the + google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class FetchDomainVerificationStatusRequest(proto.Message): + r"""Request message for + [SiteSearchEngineService.FetchDomainVerificationStatus][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.FetchDomainVerificationStatus] + method. + + Attributes: + site_search_engine (str): + Required. The site search engine resource under which we + fetch all the domain verification status. + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/siteSearchEngine``. + page_size (int): + Requested page size. Server may return fewer items than + requested. If unspecified, server will pick an appropriate + default. The maximum value is 1000; values above 1000 will + be coerced to 1000. + + If this field is negative, an INVALID_ARGUMENT error is + returned. 
+ page_token (str): + A page token, received from a previous + ``FetchDomainVerificationStatus`` call. Provide this to + retrieve the subsequent page. + + When paginating, all other parameters provided to + ``FetchDomainVerificationStatus`` must match the call that + provided the page token. + """ + + site_search_engine: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class FetchDomainVerificationStatusResponse(proto.Message): + r"""Response message for + [SiteSearchEngineService.FetchDomainVerificationStatus][google.cloud.discoveryengine.v1beta.SiteSearchEngineService.FetchDomainVerificationStatus] + method. + + Attributes: + target_sites (MutableSequence[google.cloud.discoveryengine_v1beta.types.TargetSite]): + List of TargetSites containing the site + verification status. + next_page_token (str): + A token that can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + total_size (int): + The total number of items matching the + request. This will always be populated in the + response. 
+ """ + + @property + def raw_page(self): + return self + + target_sites: MutableSequence[ + gcd_site_search_engine.TargetSite + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gcd_site_search_engine.TargetSite, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + total_size: int = proto.Field( + proto.INT32, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/user_event.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/user_event.py index d274d17a3428..f1a10b99a4ad 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/user_event.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/user_event.py @@ -165,10 +165,11 @@ class UserEvent(proto.Message): conforming to https://google.aip.dev/160#filtering. Similarly, for ``view-item-list`` events that are generated - from a [RecommendationService.RecommendRequest][], this - field may be populated directly from - [RecommendationService.RecommendRequest.filter][] conforming - to https://google.aip.dev/160#filtering. + from a + [RecommendRequest][google.cloud.discoveryengine.v1beta.RecommendRequest], + this field may be populated directly from + [RecommendRequest.filter][google.cloud.discoveryengine.v1beta.RecommendRequest.filter] + conforming to https://google.aip.dev/160#filtering. The value must be a UTF-8 encoded string with a length limit of 1,000 characters. 
Otherwise, an ``INVALID_ARGUMENT`` diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_completion_service_import_suggestion_deny_list_entries_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_completion_service_import_suggestion_deny_list_entries_async.py new file mode 100644 index 000000000000..8e322a2d56df --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_completion_service_import_suggestion_deny_list_entries_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportSuggestionDenyListEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_CompletionService_ImportSuggestionDenyListEntries_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_import_suggestion_deny_list_entries(): + # Create a client + client = discoveryengine_v1.CompletionServiceAsyncClient() + + # Initialize request argument(s) + inline_source = discoveryengine_v1.InlineSource() + inline_source.entries.block_phrase = "block_phrase_value" + inline_source.entries.match_operator = "CONTAINS" + + request = discoveryengine_v1.ImportSuggestionDenyListEntriesRequest( + inline_source=inline_source, + parent="parent_value", + ) + + # Make the request + operation = client.import_suggestion_deny_list_entries(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_CompletionService_ImportSuggestionDenyListEntries_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_completion_service_import_suggestion_deny_list_entries_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_completion_service_import_suggestion_deny_list_entries_sync.py new file mode 100644 index 000000000000..eed5cc316d4a --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_completion_service_import_suggestion_deny_list_entries_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportSuggestionDenyListEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_CompletionService_ImportSuggestionDenyListEntries_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_import_suggestion_deny_list_entries(): + # Create a client + client = discoveryengine_v1.CompletionServiceClient() + + # Initialize request argument(s) + inline_source = discoveryengine_v1.InlineSource() + inline_source.entries.block_phrase = "block_phrase_value" + inline_source.entries.match_operator = "CONTAINS" + + request = discoveryengine_v1.ImportSuggestionDenyListEntriesRequest( + inline_source=inline_source, + parent="parent_value", + ) + + # Make the request + operation = client.import_suggestion_deny_list_entries(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_CompletionService_ImportSuggestionDenyListEntries_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_completion_service_purge_suggestion_deny_list_entries_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_completion_service_purge_suggestion_deny_list_entries_async.py new file mode 100644 index 000000000000..ed0c3e74250e --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_completion_service_purge_suggestion_deny_list_entries_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PurgeSuggestionDenyListEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_CompletionService_PurgeSuggestionDenyListEntries_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_purge_suggestion_deny_list_entries(): + # Create a client + client = discoveryengine_v1.CompletionServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.PurgeSuggestionDenyListEntriesRequest( + parent="parent_value", + ) + + # Make the request + operation = client.purge_suggestion_deny_list_entries(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_CompletionService_PurgeSuggestionDenyListEntries_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_completion_service_purge_suggestion_deny_list_entries_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_completion_service_purge_suggestion_deny_list_entries_sync.py new file mode 100644 index 000000000000..752579abd84e --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_completion_service_purge_suggestion_deny_list_entries_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PurgeSuggestionDenyListEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_CompletionService_PurgeSuggestionDenyListEntries_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_purge_suggestion_deny_list_entries(): + # Create a client + client = discoveryengine_v1.CompletionServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.PurgeSuggestionDenyListEntriesRequest( + parent="parent_value", + ) + + # Make the request + operation = client.purge_suggestion_deny_list_entries(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_CompletionService_PurgeSuggestionDenyListEntries_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_data_store_service_create_data_store_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_data_store_service_create_data_store_async.py new file mode 100644 index 000000000000..68a1e493dfd8 --- /dev/null +++ 
b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_data_store_service_create_data_store_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDataStore +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_DataStoreService_CreateDataStore_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_create_data_store(): + # Create a client + client = discoveryengine_v1.DataStoreServiceAsyncClient() + + # Initialize request argument(s) + data_store = discoveryengine_v1.DataStore() + data_store.display_name = "display_name_value" + + request = discoveryengine_v1.CreateDataStoreRequest( + parent="parent_value", + data_store=data_store, + data_store_id="data_store_id_value", + ) + + # Make the request + operation = client.create_data_store(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_DataStoreService_CreateDataStore_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_data_store_service_create_data_store_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_data_store_service_create_data_store_sync.py new file mode 100644 index 000000000000..310d213aee72 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_data_store_service_create_data_store_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDataStore +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_DataStoreService_CreateDataStore_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_create_data_store(): + # Create a client + client = discoveryengine_v1.DataStoreServiceClient() + + # Initialize request argument(s) + data_store = discoveryengine_v1.DataStore() + data_store.display_name = "display_name_value" + + request = discoveryengine_v1.CreateDataStoreRequest( + parent="parent_value", + data_store=data_store, + data_store_id="data_store_id_value", + ) + + # Make the request + operation = client.create_data_store(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_DataStoreService_CreateDataStore_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_data_store_service_delete_data_store_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_data_store_service_delete_data_store_async.py new file mode 100644 index 000000000000..aa4fd6ad2084 
--- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_data_store_service_delete_data_store_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDataStore +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_DataStoreService_DeleteDataStore_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_delete_data_store(): + # Create a client + client = discoveryengine_v1.DataStoreServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.DeleteDataStoreRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_data_store(request=request) + + print("Waiting for operation to complete...") + + response = await (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_DataStoreService_DeleteDataStore_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_data_store_service_delete_data_store_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_data_store_service_delete_data_store_sync.py new file mode 100644 index 000000000000..1336506d2fec --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_data_store_service_delete_data_store_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteDataStore +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_DataStoreService_DeleteDataStore_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_delete_data_store(): + # Create a client + client = discoveryengine_v1.DataStoreServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.DeleteDataStoreRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_data_store(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_DataStoreService_DeleteDataStore_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_data_store_service_get_data_store_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_data_store_service_get_data_store_async.py new file mode 100644 index 000000000000..d23eb9bae7ce --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_data_store_service_get_data_store_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not 
use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataStore +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_DataStoreService_GetDataStore_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_get_data_store(): + # Create a client + client = discoveryengine_v1.DataStoreServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.GetDataStoreRequest( + name="name_value", + ) + + # Make the request + response = await client.get_data_store(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_DataStoreService_GetDataStore_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_data_store_service_get_data_store_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_data_store_service_get_data_store_sync.py new file mode 100644 index 000000000000..dc20de73e5a4 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_data_store_service_get_data_store_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataStore +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_DataStoreService_GetDataStore_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_get_data_store(): + # Create a client + client = discoveryengine_v1.DataStoreServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.GetDataStoreRequest( + name="name_value", + ) + + # Make the request + response = client.get_data_store(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_DataStoreService_GetDataStore_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_data_store_service_list_data_stores_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_data_store_service_list_data_stores_async.py new file mode 100644 index 000000000000..7d0df3cd61cd --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_data_store_service_list_data_stores_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDataStores +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_DataStoreService_ListDataStores_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_list_data_stores(): + # Create a client + client = discoveryengine_v1.DataStoreServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ListDataStoresRequest( + parent="parent_value", + ) + + # Make the request + page_result = await client.list_data_stores(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END discoveryengine_v1_generated_DataStoreService_ListDataStores_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_data_store_service_list_data_stores_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_data_store_service_list_data_stores_sync.py new file mode 100644 index 000000000000..5191ec2e3a8a --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_data_store_service_list_data_stores_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDataStores +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_DataStoreService_ListDataStores_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_list_data_stores(): + # Create a client + client = discoveryengine_v1.DataStoreServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ListDataStoresRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_stores(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END discoveryengine_v1_generated_DataStoreService_ListDataStores_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_data_store_service_update_data_store_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_data_store_service_update_data_store_async.py new file mode 100644 index 000000000000..5e5872b3a2e8 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_data_store_service_update_data_store_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDataStore +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_DataStoreService_UpdateDataStore_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_update_data_store(): + # Create a client + client = discoveryengine_v1.DataStoreServiceAsyncClient() + + # Initialize request argument(s) + data_store = discoveryengine_v1.DataStore() + data_store.display_name = "display_name_value" + + request = discoveryengine_v1.UpdateDataStoreRequest( + data_store=data_store, + ) + + # Make the request + response = await client.update_data_store(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_DataStoreService_UpdateDataStore_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_data_store_service_update_data_store_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_data_store_service_update_data_store_sync.py new file mode 100644 index 000000000000..c8beba511e76 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_data_store_service_update_data_store_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for UpdateDataStore +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_DataStoreService_UpdateDataStore_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_update_data_store(): + # Create a client + client = discoveryengine_v1.DataStoreServiceClient() + + # Initialize request argument(s) + data_store = discoveryengine_v1.DataStore() + data_store.display_name = "display_name_value" + + request = discoveryengine_v1.UpdateDataStoreRequest( + data_store=data_store, + ) + + # Make the request + response = client.update_data_store(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_DataStoreService_UpdateDataStore_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_engine_service_create_engine_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_engine_service_create_engine_async.py new file mode 100644 index 000000000000..bb9b6ba82c26 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_engine_service_create_engine_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may 
not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEngine +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_EngineService_CreateEngine_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_create_engine(): + # Create a client + client = discoveryengine_v1.EngineServiceAsyncClient() + + # Initialize request argument(s) + engine = discoveryengine_v1.Engine() + engine.display_name = "display_name_value" + engine.solution_type = "SOLUTION_TYPE_CHAT" + + request = discoveryengine_v1.CreateEngineRequest( + parent="parent_value", + engine=engine, + engine_id="engine_id_value", + ) + + # Make the request + operation = client.create_engine(request=request) + + print("Waiting for operation to complete...") + + response = await (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_EngineService_CreateEngine_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_engine_service_create_engine_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_engine_service_create_engine_sync.py new file mode 100644 index 000000000000..e4b3fc8d5996 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_engine_service_create_engine_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEngine +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_EngineService_CreateEngine_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_create_engine(): + # Create a client + client = discoveryengine_v1.EngineServiceClient() + + # Initialize request argument(s) + engine = discoveryengine_v1.Engine() + engine.display_name = "display_name_value" + engine.solution_type = "SOLUTION_TYPE_CHAT" + + request = discoveryengine_v1.CreateEngineRequest( + parent="parent_value", + engine=engine, + engine_id="engine_id_value", + ) + + # Make the request + operation = client.create_engine(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_EngineService_CreateEngine_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_engine_service_delete_engine_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_engine_service_delete_engine_async.py new file mode 100644 index 000000000000..0996995fc7b5 --- /dev/null +++ 
b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_engine_service_delete_engine_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEngine +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_EngineService_DeleteEngine_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_delete_engine(): + # Create a client + client = discoveryengine_v1.EngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.DeleteEngineRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_engine(request=request) + + print("Waiting for operation to complete...") + + response = await (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_EngineService_DeleteEngine_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_engine_service_delete_engine_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_engine_service_delete_engine_sync.py new file mode 100644 index 000000000000..a9bd5cddf7a5 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_engine_service_delete_engine_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEngine +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_EngineService_DeleteEngine_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_delete_engine(): + # Create a client + client = discoveryengine_v1.EngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.DeleteEngineRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_engine(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_EngineService_DeleteEngine_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_engine_service_get_engine_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_engine_service_get_engine_async.py new file mode 100644 index 000000000000..fad57921048f --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_engine_service_get_engine_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEngine +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_EngineService_GetEngine_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_get_engine(): + # Create a client + client = discoveryengine_v1.EngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.GetEngineRequest( + name="name_value", + ) + + # Make the request + response = await client.get_engine(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_EngineService_GetEngine_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_engine_service_get_engine_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_engine_service_get_engine_sync.py new file mode 100644 index 000000000000..c80b45658a38 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_engine_service_get_engine_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEngine +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_EngineService_GetEngine_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_get_engine(): + # Create a client + client = discoveryengine_v1.EngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.GetEngineRequest( + name="name_value", + ) + + # Make the request + response = client.get_engine(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_EngineService_GetEngine_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_engine_service_list_engines_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_engine_service_list_engines_async.py new file mode 100644 index 000000000000..648a5220538e --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_engine_service_list_engines_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEngines +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_EngineService_ListEngines_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_list_engines(): + # Create a client + client = discoveryengine_v1.EngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ListEnginesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_engines(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END discoveryengine_v1_generated_EngineService_ListEngines_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_engine_service_list_engines_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_engine_service_list_engines_sync.py new file mode 100644 index 000000000000..4ade696c0c6b --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_engine_service_list_engines_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEngines +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_EngineService_ListEngines_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_list_engines(): + # Create a client + client = discoveryengine_v1.EngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ListEnginesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_engines(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END discoveryengine_v1_generated_EngineService_ListEngines_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_engine_service_update_engine_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_engine_service_update_engine_async.py new file mode 100644 index 000000000000..c4dba5819494 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_engine_service_update_engine_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEngine +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_EngineService_UpdateEngine_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_update_engine(): + # Create a client + client = discoveryengine_v1.EngineServiceAsyncClient() + + # Initialize request argument(s) + engine = discoveryengine_v1.Engine() + engine.display_name = "display_name_value" + engine.solution_type = "SOLUTION_TYPE_CHAT" + + request = discoveryengine_v1.UpdateEngineRequest( + engine=engine, + ) + + # Make the request + response = await client.update_engine(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_EngineService_UpdateEngine_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_engine_service_update_engine_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_engine_service_update_engine_sync.py new file mode 100644 index 000000000000..041b001b2bcb --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_engine_service_update_engine_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for UpdateEngine +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_EngineService_UpdateEngine_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_update_engine(): + # Create a client + client = discoveryengine_v1.EngineServiceClient() + + # Initialize request argument(s) + engine = discoveryengine_v1.Engine() + engine.display_name = "display_name_value" + engine.solution_type = "SOLUTION_TYPE_CHAT" + + request = discoveryengine_v1.UpdateEngineRequest( + engine=engine, + ) + + # Make the request + response = client.update_engine(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_EngineService_UpdateEngine_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_batch_create_target_sites_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_batch_create_target_sites_async.py new file mode 100644 index 000000000000..1a43651f32ea --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_batch_create_target_sites_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# 
Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchCreateTargetSites +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SiteSearchEngineService_BatchCreateTargetSites_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_batch_create_target_sites(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + requests = discoveryengine_v1.CreateTargetSiteRequest() + requests.parent = "parent_value" + requests.target_site.provided_uri_pattern = "provided_uri_pattern_value" + + request = discoveryengine_v1.BatchCreateTargetSitesRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + operation = client.batch_create_target_sites(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SiteSearchEngineService_BatchCreateTargetSites_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_batch_create_target_sites_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_batch_create_target_sites_sync.py new file mode 100644 index 000000000000..404e7a052af9 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_batch_create_target_sites_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchCreateTargetSites +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SiteSearchEngineService_BatchCreateTargetSites_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_batch_create_target_sites(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + requests = discoveryengine_v1.CreateTargetSiteRequest() + requests.parent = "parent_value" + requests.target_site.provided_uri_pattern = "provided_uri_pattern_value" + + request = discoveryengine_v1.BatchCreateTargetSitesRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + operation = client.batch_create_target_sites(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SiteSearchEngineService_BatchCreateTargetSites_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_batch_verify_target_sites_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_batch_verify_target_sites_async.py new file mode 100644 index 000000000000..3753138f796a --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_batch_verify_target_sites_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchVerifyTargetSites +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SiteSearchEngineService_BatchVerifyTargetSites_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_batch_verify_target_sites(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.BatchVerifyTargetSitesRequest( + parent="parent_value", + ) + + # Make the request + operation = client.batch_verify_target_sites(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SiteSearchEngineService_BatchVerifyTargetSites_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_batch_verify_target_sites_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_batch_verify_target_sites_sync.py new file mode 100644 index 000000000000..5208da90a567 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_batch_verify_target_sites_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. 
DO NOT EDIT! +# +# Snippet for BatchVerifyTargetSites +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SiteSearchEngineService_BatchVerifyTargetSites_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_batch_verify_target_sites(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.BatchVerifyTargetSitesRequest( + parent="parent_value", + ) + + # Make the request + operation = client.batch_verify_target_sites(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SiteSearchEngineService_BatchVerifyTargetSites_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_create_target_site_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_create_target_site_async.py new file mode 100644 index 000000000000..8fedffd3cf97 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_create_target_site_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 
-*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTargetSite +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SiteSearchEngineService_CreateTargetSite_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_create_target_site(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + target_site = discoveryengine_v1.TargetSite() + target_site.provided_uri_pattern = "provided_uri_pattern_value" + + request = discoveryengine_v1.CreateTargetSiteRequest( + parent="parent_value", + target_site=target_site, + ) + + # Make the request + operation = client.create_target_site(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SiteSearchEngineService_CreateTargetSite_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_create_target_site_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_create_target_site_sync.py new file mode 100644 index 000000000000..5512b5bdf6e6 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_create_target_site_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTargetSite +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SiteSearchEngineService_CreateTargetSite_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_create_target_site(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + target_site = discoveryengine_v1.TargetSite() + target_site.provided_uri_pattern = "provided_uri_pattern_value" + + request = discoveryengine_v1.CreateTargetSiteRequest( + parent="parent_value", + target_site=target_site, + ) + + # Make the request + operation = client.create_target_site(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SiteSearchEngineService_CreateTargetSite_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_delete_target_site_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_delete_target_site_async.py new file mode 100644 index 
000000000000..b43145f5d201 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_delete_target_site_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTargetSite +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SiteSearchEngineService_DeleteTargetSite_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_delete_target_site(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.DeleteTargetSiteRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_target_site(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SiteSearchEngineService_DeleteTargetSite_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_delete_target_site_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_delete_target_site_sync.py new file mode 100644 index 000000000000..6de5c785f611 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_delete_target_site_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteTargetSite +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SiteSearchEngineService_DeleteTargetSite_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_delete_target_site(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.DeleteTargetSiteRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_target_site(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SiteSearchEngineService_DeleteTargetSite_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_disable_advanced_site_search_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_disable_advanced_site_search_async.py new file mode 100644 index 000000000000..26c5c93013b1 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_disable_advanced_site_search_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 
Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DisableAdvancedSiteSearch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SiteSearchEngineService_DisableAdvancedSiteSearch_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_disable_advanced_site_search(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.DisableAdvancedSiteSearchRequest( + site_search_engine="site_search_engine_value", + ) + + # Make the request + operation = client.disable_advanced_site_search(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SiteSearchEngineService_DisableAdvancedSiteSearch_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_disable_advanced_site_search_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_disable_advanced_site_search_sync.py new file mode 100644 index 000000000000..dd432c613cc1 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_disable_advanced_site_search_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DisableAdvancedSiteSearch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SiteSearchEngineService_DisableAdvancedSiteSearch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_disable_advanced_site_search(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.DisableAdvancedSiteSearchRequest( + site_search_engine="site_search_engine_value", + ) + + # Make the request + operation = client.disable_advanced_site_search(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SiteSearchEngineService_DisableAdvancedSiteSearch_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_enable_advanced_site_search_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_enable_advanced_site_search_async.py new file mode 100644 index 000000000000..d640d47f2aa7 --- /dev/null +++ 
b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_enable_advanced_site_search_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EnableAdvancedSiteSearch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SiteSearchEngineService_EnableAdvancedSiteSearch_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_enable_advanced_site_search(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.EnableAdvancedSiteSearchRequest( + site_search_engine="site_search_engine_value", + ) + + # Make the request + operation = client.enable_advanced_site_search(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SiteSearchEngineService_EnableAdvancedSiteSearch_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_enable_advanced_site_search_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_enable_advanced_site_search_sync.py new file mode 100644 index 000000000000..5d2658aa641b --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_enable_advanced_site_search_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EnableAdvancedSiteSearch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SiteSearchEngineService_EnableAdvancedSiteSearch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_enable_advanced_site_search(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.EnableAdvancedSiteSearchRequest( + site_search_engine="site_search_engine_value", + ) + + # Make the request + operation = client.enable_advanced_site_search(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SiteSearchEngineService_EnableAdvancedSiteSearch_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_fetch_domain_verification_status_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_fetch_domain_verification_status_async.py new file mode 100644 index 000000000000..ad19d4311b3b --- /dev/null 
+++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_fetch_domain_verification_status_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchDomainVerificationStatus +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SiteSearchEngineService_FetchDomainVerificationStatus_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_fetch_domain_verification_status(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.FetchDomainVerificationStatusRequest( + site_search_engine="site_search_engine_value", + ) + + # Make the request + page_result = client.fetch_domain_verification_status(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END discoveryengine_v1_generated_SiteSearchEngineService_FetchDomainVerificationStatus_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_fetch_domain_verification_status_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_fetch_domain_verification_status_sync.py new file mode 100644 index 000000000000..e89ed2f48a5a --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_fetch_domain_verification_status_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchDomainVerificationStatus +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SiteSearchEngineService_FetchDomainVerificationStatus_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_fetch_domain_verification_status(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.FetchDomainVerificationStatusRequest( + site_search_engine="site_search_engine_value", + ) + + # Make the request + page_result = client.fetch_domain_verification_status(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END discoveryengine_v1_generated_SiteSearchEngineService_FetchDomainVerificationStatus_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_get_site_search_engine_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_get_site_search_engine_async.py new file mode 100644 index 000000000000..3bae0eb19389 --- /dev/null +++ 
b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_get_site_search_engine_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSiteSearchEngine +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SiteSearchEngineService_GetSiteSearchEngine_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_get_site_search_engine(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.GetSiteSearchEngineRequest( + name="name_value", + ) + + # Make the request + response = await client.get_site_search_engine(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SiteSearchEngineService_GetSiteSearchEngine_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_get_site_search_engine_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_get_site_search_engine_sync.py new file mode 100644 index 000000000000..8aadbbfe66c6 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_get_site_search_engine_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetSiteSearchEngine +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SiteSearchEngineService_GetSiteSearchEngine_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_get_site_search_engine(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.GetSiteSearchEngineRequest( + name="name_value", + ) + + # Make the request + response = client.get_site_search_engine(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SiteSearchEngineService_GetSiteSearchEngine_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_get_target_site_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_get_target_site_async.py new file mode 100644 index 000000000000..7f30e8544b46 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_get_target_site_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this 
file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTargetSite +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SiteSearchEngineService_GetTargetSite_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_get_target_site(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.GetTargetSiteRequest( + name="name_value", + ) + + # Make the request + response = await client.get_target_site(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SiteSearchEngineService_GetTargetSite_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_get_target_site_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_get_target_site_sync.py new file mode 100644 index 000000000000..e32e9e28e952 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_get_target_site_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTargetSite +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SiteSearchEngineService_GetTargetSite_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_get_target_site(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.GetTargetSiteRequest( + name="name_value", + ) + + # Make the request + response = client.get_target_site(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SiteSearchEngineService_GetTargetSite_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_list_target_sites_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_list_target_sites_async.py new file mode 100644 index 000000000000..397f54b83b19 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_list_target_sites_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTargetSites +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SiteSearchEngineService_ListTargetSites_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_list_target_sites(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ListTargetSitesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_target_sites(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END discoveryengine_v1_generated_SiteSearchEngineService_ListTargetSites_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_list_target_sites_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_list_target_sites_sync.py new file mode 100644 index 000000000000..b9d4ab217aa7 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_list_target_sites_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListTargetSites +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SiteSearchEngineService_ListTargetSites_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_list_target_sites(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ListTargetSitesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_target_sites(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END discoveryengine_v1_generated_SiteSearchEngineService_ListTargetSites_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_recrawl_uris_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_recrawl_uris_async.py new file mode 100644 index 000000000000..918c3ef72c66 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_recrawl_uris_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use 
this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RecrawlUris +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SiteSearchEngineService_RecrawlUris_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_recrawl_uris(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.RecrawlUrisRequest( + site_search_engine="site_search_engine_value", + uris=['uris_value1', 'uris_value2'], + ) + + # Make the request + operation = client.recrawl_uris(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SiteSearchEngineService_RecrawlUris_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_recrawl_uris_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_recrawl_uris_sync.py new file mode 100644 index 000000000000..2c77bc5a5b43 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_recrawl_uris_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for RecrawlUris +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SiteSearchEngineService_RecrawlUris_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_recrawl_uris(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.RecrawlUrisRequest( + site_search_engine="site_search_engine_value", + uris=['uris_value1', 'uris_value2'], + ) + + # Make the request + operation = client.recrawl_uris(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SiteSearchEngineService_RecrawlUris_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_update_target_site_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_update_target_site_async.py new file mode 100644 index 000000000000..9f6bbd301ad8 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_update_target_site_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 
2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTargetSite +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SiteSearchEngineService_UpdateTargetSite_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_update_target_site(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + target_site = discoveryengine_v1.TargetSite() + target_site.provided_uri_pattern = "provided_uri_pattern_value" + + request = discoveryengine_v1.UpdateTargetSiteRequest( + target_site=target_site, + ) + + # Make the request + operation = client.update_target_site(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SiteSearchEngineService_UpdateTargetSite_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_update_target_site_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_update_target_site_sync.py new file mode 100644 index 000000000000..63167e97f23a --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_site_search_engine_service_update_target_site_sync.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTargetSite +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SiteSearchEngineService_UpdateTargetSite_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_update_target_site(): + # Create a client + client = discoveryengine_v1.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + target_site = discoveryengine_v1.TargetSite() + target_site.provided_uri_pattern = "provided_uri_pattern_value" + + request = discoveryengine_v1.UpdateTargetSiteRequest( + target_site=target_site, + ) + + # Make the request + operation = client.update_target_site(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SiteSearchEngineService_UpdateTargetSite_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_completion_service_import_suggestion_deny_list_entries_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_completion_service_import_suggestion_deny_list_entries_async.py new file mode 100644 index 
000000000000..667c4cde2ec0 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_completion_service_import_suggestion_deny_list_entries_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportSuggestionDenyListEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_CompletionService_ImportSuggestionDenyListEntries_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_import_suggestion_deny_list_entries(): + # Create a client + client = discoveryengine_v1beta.CompletionServiceAsyncClient() + + # Initialize request argument(s) + inline_source = discoveryengine_v1beta.InlineSource() + inline_source.entries.block_phrase = "block_phrase_value" + inline_source.entries.match_operator = "CONTAINS" + + request = discoveryengine_v1beta.ImportSuggestionDenyListEntriesRequest( + inline_source=inline_source, + parent="parent_value", + ) + + # Make the request + operation = client.import_suggestion_deny_list_entries(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_CompletionService_ImportSuggestionDenyListEntries_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_completion_service_import_suggestion_deny_list_entries_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_completion_service_import_suggestion_deny_list_entries_sync.py new file mode 100644 index 000000000000..71a219d949dc --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_completion_service_import_suggestion_deny_list_entries_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportSuggestionDenyListEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_CompletionService_ImportSuggestionDenyListEntries_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_import_suggestion_deny_list_entries(): + # Create a client + client = discoveryengine_v1beta.CompletionServiceClient() + + # Initialize request argument(s) + inline_source = discoveryengine_v1beta.InlineSource() + inline_source.entries.block_phrase = "block_phrase_value" + inline_source.entries.match_operator = "CONTAINS" + + request = discoveryengine_v1beta.ImportSuggestionDenyListEntriesRequest( + inline_source=inline_source, + parent="parent_value", + ) + + # Make the request + operation = client.import_suggestion_deny_list_entries(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_CompletionService_ImportSuggestionDenyListEntries_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_completion_service_purge_suggestion_deny_list_entries_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_completion_service_purge_suggestion_deny_list_entries_async.py new file mode 100644 index 000000000000..9c169469fda6 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_completion_service_purge_suggestion_deny_list_entries_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PurgeSuggestionDenyListEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_CompletionService_PurgeSuggestionDenyListEntries_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_purge_suggestion_deny_list_entries(): + # Create a client + client = discoveryengine_v1beta.CompletionServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.PurgeSuggestionDenyListEntriesRequest( + parent="parent_value", + ) + + # Make the request + operation = client.purge_suggestion_deny_list_entries(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_CompletionService_PurgeSuggestionDenyListEntries_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_completion_service_purge_suggestion_deny_list_entries_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_completion_service_purge_suggestion_deny_list_entries_sync.py new file mode 100644 index 000000000000..19fe88eafda3 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_completion_service_purge_suggestion_deny_list_entries_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PurgeSuggestionDenyListEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_CompletionService_PurgeSuggestionDenyListEntries_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_purge_suggestion_deny_list_entries(): + # Create a client + client = discoveryengine_v1beta.CompletionServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.PurgeSuggestionDenyListEntriesRequest( + parent="parent_value", + ) + + # Make the request + operation = client.purge_suggestion_deny_list_entries(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_CompletionService_PurgeSuggestionDenyListEntries_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_data_store_service_create_data_store_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_data_store_service_create_data_store_async.py new file mode 100644 index 000000000000..0d0178356f4a --- /dev/null +++ 
b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_data_store_service_create_data_store_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDataStore +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_DataStoreService_CreateDataStore_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_create_data_store(): + # Create a client + client = discoveryengine_v1beta.DataStoreServiceAsyncClient() + + # Initialize request argument(s) + data_store = discoveryengine_v1beta.DataStore() + data_store.display_name = "display_name_value" + + request = discoveryengine_v1beta.CreateDataStoreRequest( + parent="parent_value", + data_store=data_store, + data_store_id="data_store_id_value", + ) + + # Make the request + operation = client.create_data_store(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_DataStoreService_CreateDataStore_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_data_store_service_create_data_store_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_data_store_service_create_data_store_sync.py new file mode 100644 index 000000000000..c83576b432d5 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_data_store_service_create_data_store_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDataStore +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_DataStoreService_CreateDataStore_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_create_data_store(): + # Create a client + client = discoveryengine_v1beta.DataStoreServiceClient() + + # Initialize request argument(s) + data_store = discoveryengine_v1beta.DataStore() + data_store.display_name = "display_name_value" + + request = discoveryengine_v1beta.CreateDataStoreRequest( + parent="parent_value", + data_store=data_store, + data_store_id="data_store_id_value", + ) + + # Make the request + operation = client.create_data_store(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_DataStoreService_CreateDataStore_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_data_store_service_delete_data_store_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_data_store_service_delete_data_store_async.py new file mode 100644 
index 000000000000..e4468ca2923c --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_data_store_service_delete_data_store_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDataStore +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_DataStoreService_DeleteDataStore_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_delete_data_store(): + # Create a client + client = discoveryengine_v1beta.DataStoreServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.DeleteDataStoreRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_data_store(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_DataStoreService_DeleteDataStore_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_data_store_service_delete_data_store_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_data_store_service_delete_data_store_sync.py new file mode 100644 index 000000000000..39cb4e642ce6 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_data_store_service_delete_data_store_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteDataStore +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_DataStoreService_DeleteDataStore_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_delete_data_store(): + # Create a client + client = discoveryengine_v1beta.DataStoreServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.DeleteDataStoreRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_data_store(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_DataStoreService_DeleteDataStore_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_data_store_service_get_data_store_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_data_store_service_get_data_store_async.py new file mode 100644 index 000000000000..8c3e48e9b7d9 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_data_store_service_get_data_store_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 
(the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataStore +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_DataStoreService_GetDataStore_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_get_data_store(): + # Create a client + client = discoveryengine_v1beta.DataStoreServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.GetDataStoreRequest( + name="name_value", + ) + + # Make the request + response = await client.get_data_store(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_DataStoreService_GetDataStore_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_data_store_service_get_data_store_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_data_store_service_get_data_store_sync.py new file mode 100644 index 000000000000..337b20e26384 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_data_store_service_get_data_store_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataStore +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_DataStoreService_GetDataStore_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_get_data_store(): + # Create a client + client = discoveryengine_v1beta.DataStoreServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.GetDataStoreRequest( + name="name_value", + ) + + # Make the request + response = client.get_data_store(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_DataStoreService_GetDataStore_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_data_store_service_list_data_stores_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_data_store_service_list_data_stores_async.py new file mode 100644 index 000000000000..5ba5e327cdd5 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_data_store_service_list_data_stores_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDataStores +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_DataStoreService_ListDataStores_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_list_data_stores(): + # Create a client + client = discoveryengine_v1beta.DataStoreServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.ListDataStoresRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_stores(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END discoveryengine_v1beta_generated_DataStoreService_ListDataStores_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_data_store_service_list_data_stores_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_data_store_service_list_data_stores_sync.py new file mode 100644 index 000000000000..bf32f18898a4 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_data_store_service_list_data_stores_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListDataStores +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_DataStoreService_ListDataStores_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_list_data_stores(): + # Create a client + client = discoveryengine_v1beta.DataStoreServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.ListDataStoresRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_stores(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END discoveryengine_v1beta_generated_DataStoreService_ListDataStores_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_data_store_service_update_data_store_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_data_store_service_update_data_store_async.py new file mode 100644 index 000000000000..f27d55d0abae --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_data_store_service_update_data_store_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this 
file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDataStore +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_DataStoreService_UpdateDataStore_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_update_data_store(): + # Create a client + client = discoveryengine_v1beta.DataStoreServiceAsyncClient() + + # Initialize request argument(s) + data_store = discoveryengine_v1beta.DataStore() + data_store.display_name = "display_name_value" + + request = discoveryengine_v1beta.UpdateDataStoreRequest( + data_store=data_store, + ) + + # Make the request + response = await client.update_data_store(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_DataStoreService_UpdateDataStore_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_data_store_service_update_data_store_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_data_store_service_update_data_store_sync.py new file mode 100644 index 000000000000..01f4a45f7cb6 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_data_store_service_update_data_store_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for UpdateDataStore +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_DataStoreService_UpdateDataStore_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_update_data_store(): + # Create a client + client = discoveryengine_v1beta.DataStoreServiceClient() + + # Initialize request argument(s) + data_store = discoveryengine_v1beta.DataStore() + data_store.display_name = "display_name_value" + + request = discoveryengine_v1beta.UpdateDataStoreRequest( + data_store=data_store, + ) + + # Make the request + response = client.update_data_store(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_DataStoreService_UpdateDataStore_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_engine_service_create_engine_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_engine_service_create_engine_async.py new file mode 100644 index 000000000000..8cbaee9c044a --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_engine_service_create_engine_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, 
Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEngine +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_EngineService_CreateEngine_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_create_engine(): + # Create a client + client = discoveryengine_v1beta.EngineServiceAsyncClient() + + # Initialize request argument(s) + engine = discoveryengine_v1beta.Engine() + engine.display_name = "display_name_value" + engine.solution_type = "SOLUTION_TYPE_CHAT" + + request = discoveryengine_v1beta.CreateEngineRequest( + parent="parent_value", + engine=engine, + engine_id="engine_id_value", + ) + + # Make the request + operation = client.create_engine(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_EngineService_CreateEngine_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_engine_service_create_engine_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_engine_service_create_engine_sync.py new file mode 100644 index 000000000000..267015210f30 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_engine_service_create_engine_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEngine +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_EngineService_CreateEngine_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_create_engine(): + # Create a client + client = discoveryengine_v1beta.EngineServiceClient() + + # Initialize request argument(s) + engine = discoveryengine_v1beta.Engine() + engine.display_name = "display_name_value" + engine.solution_type = "SOLUTION_TYPE_CHAT" + + request = discoveryengine_v1beta.CreateEngineRequest( + parent="parent_value", + engine=engine, + engine_id="engine_id_value", + ) + + # Make the request + operation = client.create_engine(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_EngineService_CreateEngine_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_engine_service_delete_engine_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_engine_service_delete_engine_async.py new file mode 100644 index 
000000000000..925b9ca63521 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_engine_service_delete_engine_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEngine +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_EngineService_DeleteEngine_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_delete_engine(): + # Create a client + client = discoveryengine_v1beta.EngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.DeleteEngineRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_engine(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_EngineService_DeleteEngine_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_engine_service_delete_engine_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_engine_service_delete_engine_sync.py new file mode 100644 index 000000000000..ff742e050435 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_engine_service_delete_engine_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteEngine +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_EngineService_DeleteEngine_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_delete_engine(): + # Create a client + client = discoveryengine_v1beta.EngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.DeleteEngineRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_engine(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_EngineService_DeleteEngine_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_engine_service_get_engine_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_engine_service_get_engine_async.py new file mode 100644 index 000000000000..3c3cbcdbee09 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_engine_service_get_engine_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except 
in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEngine +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_EngineService_GetEngine_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_get_engine(): + # Create a client + client = discoveryengine_v1beta.EngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.GetEngineRequest( + name="name_value", + ) + + # Make the request + response = await client.get_engine(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_EngineService_GetEngine_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_engine_service_get_engine_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_engine_service_get_engine_sync.py new file mode 100644 index 000000000000..d8be134649d2 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_engine_service_get_engine_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEngine +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_EngineService_GetEngine_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_get_engine(): + # Create a client + client = discoveryengine_v1beta.EngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.GetEngineRequest( + name="name_value", + ) + + # Make the request + response = client.get_engine(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_EngineService_GetEngine_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_engine_service_list_engines_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_engine_service_list_engines_async.py new file mode 100644 index 000000000000..78d7adf2ea07 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_engine_service_list_engines_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEngines +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_EngineService_ListEngines_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_list_engines(): + # Create a client + client = discoveryengine_v1beta.EngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.ListEnginesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_engines(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END discoveryengine_v1beta_generated_EngineService_ListEngines_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_engine_service_list_engines_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_engine_service_list_engines_sync.py new file mode 100644 index 000000000000..97632d35878e --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_engine_service_list_engines_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEngines +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_EngineService_ListEngines_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_list_engines(): + # Create a client + client = discoveryengine_v1beta.EngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.ListEnginesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_engines(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END discoveryengine_v1beta_generated_EngineService_ListEngines_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_engine_service_update_engine_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_engine_service_update_engine_async.py new file mode 100644 index 000000000000..49c8c29224aa --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_engine_service_update_engine_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEngine +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_EngineService_UpdateEngine_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_update_engine(): + # Create a client + client = discoveryengine_v1beta.EngineServiceAsyncClient() + + # Initialize request argument(s) + engine = discoveryengine_v1beta.Engine() + engine.display_name = "display_name_value" + engine.solution_type = "SOLUTION_TYPE_CHAT" + + request = discoveryengine_v1beta.UpdateEngineRequest( + engine=engine, + ) + + # Make the request + response = await client.update_engine(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_EngineService_UpdateEngine_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_engine_service_update_engine_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_engine_service_update_engine_sync.py new file mode 100644 index 000000000000..5f2853022bf4 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_engine_service_update_engine_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for UpdateEngine +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_EngineService_UpdateEngine_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_update_engine(): + # Create a client + client = discoveryengine_v1beta.EngineServiceClient() + + # Initialize request argument(s) + engine = discoveryengine_v1beta.Engine() + engine.display_name = "display_name_value" + engine.solution_type = "SOLUTION_TYPE_CHAT" + + request = discoveryengine_v1beta.UpdateEngineRequest( + engine=engine, + ) + + # Make the request + response = client.update_engine(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_EngineService_UpdateEngine_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_serving_config_service_get_serving_config_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_serving_config_service_get_serving_config_async.py new file mode 100644 index 000000000000..2ba844ca790d --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_serving_config_service_get_serving_config_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# 
Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetServingConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_ServingConfigService_GetServingConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_get_serving_config(): + # Create a client + client = discoveryengine_v1beta.ServingConfigServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.GetServingConfigRequest( + name="name_value", + ) + + # Make the request + response = await client.get_serving_config(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_ServingConfigService_GetServingConfig_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_serving_config_service_get_serving_config_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_serving_config_service_get_serving_config_sync.py new file mode 100644 index 000000000000..a2408daed7df --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_serving_config_service_get_serving_config_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetServingConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_ServingConfigService_GetServingConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_get_serving_config(): + # Create a client + client = discoveryengine_v1beta.ServingConfigServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.GetServingConfigRequest( + name="name_value", + ) + + # Make the request + response = client.get_serving_config(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_ServingConfigService_GetServingConfig_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_serving_config_service_list_serving_configs_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_serving_config_service_list_serving_configs_async.py new file mode 100644 index 000000000000..6f3afab714bf --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_serving_config_service_list_serving_configs_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use 
this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListServingConfigs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_ServingConfigService_ListServingConfigs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_list_serving_configs(): + # Create a client + client = discoveryengine_v1beta.ServingConfigServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.ListServingConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_serving_configs(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END discoveryengine_v1beta_generated_ServingConfigService_ListServingConfigs_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_serving_config_service_list_serving_configs_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_serving_config_service_list_serving_configs_sync.py new file mode 100644 index 000000000000..9aff0c53c719 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_serving_config_service_list_serving_configs_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListServingConfigs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_ServingConfigService_ListServingConfigs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_list_serving_configs(): + # Create a client + client = discoveryengine_v1beta.ServingConfigServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.ListServingConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_serving_configs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END discoveryengine_v1beta_generated_ServingConfigService_ListServingConfigs_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_serving_config_service_update_serving_config_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_serving_config_service_update_serving_config_async.py new file mode 100644 index 000000000000..25f0de729641 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_serving_config_service_update_serving_config_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache 
License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateServingConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_ServingConfigService_UpdateServingConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_update_serving_config(): + # Create a client + client = discoveryengine_v1beta.ServingConfigServiceAsyncClient() + + # Initialize request argument(s) + serving_config = discoveryengine_v1beta.ServingConfig() + serving_config.media_config.content_watched_percentage_threshold = 0.3811 + serving_config.display_name = "display_name_value" + serving_config.solution_type = "SOLUTION_TYPE_CHAT" + + request = discoveryengine_v1beta.UpdateServingConfigRequest( + serving_config=serving_config, + ) + + # Make the request + response = await client.update_serving_config(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_ServingConfigService_UpdateServingConfig_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_serving_config_service_update_serving_config_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_serving_config_service_update_serving_config_sync.py new file mode 100644 index 000000000000..8030da98ad94 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_serving_config_service_update_serving_config_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateServingConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_ServingConfigService_UpdateServingConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_update_serving_config(): + # Create a client + client = discoveryengine_v1beta.ServingConfigServiceClient() + + # Initialize request argument(s) + serving_config = discoveryengine_v1beta.ServingConfig() + serving_config.media_config.content_watched_percentage_threshold = 0.3811 + serving_config.display_name = "display_name_value" + serving_config.solution_type = "SOLUTION_TYPE_CHAT" + + request = discoveryengine_v1beta.UpdateServingConfigRequest( + serving_config=serving_config, + ) + + # Make the request + response = client.update_serving_config(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_ServingConfigService_UpdateServingConfig_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_batch_create_target_sites_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_batch_create_target_sites_async.py new file mode 100644 index 000000000000..09d2bd49c9f1 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_batch_create_target_sites_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchCreateTargetSites +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SiteSearchEngineService_BatchCreateTargetSites_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_batch_create_target_sites(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + requests = discoveryengine_v1beta.CreateTargetSiteRequest() + requests.parent = "parent_value" + requests.target_site.provided_uri_pattern = "provided_uri_pattern_value" + + request = discoveryengine_v1beta.BatchCreateTargetSitesRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + operation = client.batch_create_target_sites(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_SiteSearchEngineService_BatchCreateTargetSites_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_batch_create_target_sites_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_batch_create_target_sites_sync.py new file mode 100644 index 000000000000..934d39798d02 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_batch_create_target_sites_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchCreateTargetSites +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SiteSearchEngineService_BatchCreateTargetSites_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_batch_create_target_sites(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + requests = discoveryengine_v1beta.CreateTargetSiteRequest() + requests.parent = "parent_value" + requests.target_site.provided_uri_pattern = "provided_uri_pattern_value" + + request = discoveryengine_v1beta.BatchCreateTargetSitesRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + operation = client.batch_create_target_sites(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_SiteSearchEngineService_BatchCreateTargetSites_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_batch_verify_target_sites_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_batch_verify_target_sites_async.py new file mode 100644 index 000000000000..24912fa17cf8 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_batch_verify_target_sites_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchVerifyTargetSites +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SiteSearchEngineService_BatchVerifyTargetSites_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_batch_verify_target_sites(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.BatchVerifyTargetSitesRequest( + parent="parent_value", + ) + + # Make the request + operation = client.batch_verify_target_sites(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_SiteSearchEngineService_BatchVerifyTargetSites_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_batch_verify_target_sites_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_batch_verify_target_sites_sync.py new file mode 100644 index 000000000000..6d3e3e8e6a9b --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_batch_verify_target_sites_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchVerifyTargetSites +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SiteSearchEngineService_BatchVerifyTargetSites_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_batch_verify_target_sites(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.BatchVerifyTargetSitesRequest( + parent="parent_value", + ) + + # Make the request + operation = client.batch_verify_target_sites(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_SiteSearchEngineService_BatchVerifyTargetSites_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_create_target_site_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_create_target_site_async.py new file mode 100644 index 000000000000..03caf8e65cc6 --- /dev/null +++ 
b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_create_target_site_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTargetSite +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SiteSearchEngineService_CreateTargetSite_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_create_target_site(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + target_site = discoveryengine_v1beta.TargetSite() + target_site.provided_uri_pattern = "provided_uri_pattern_value" + + request = discoveryengine_v1beta.CreateTargetSiteRequest( + parent="parent_value", + target_site=target_site, + ) + + # Make the request + operation = client.create_target_site(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_SiteSearchEngineService_CreateTargetSite_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_create_target_site_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_create_target_site_sync.py new file mode 100644 index 000000000000..d483a8f78dfb --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_create_target_site_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTargetSite +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SiteSearchEngineService_CreateTargetSite_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_create_target_site(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + target_site = discoveryengine_v1beta.TargetSite() + target_site.provided_uri_pattern = "provided_uri_pattern_value" + + request = discoveryengine_v1beta.CreateTargetSiteRequest( + parent="parent_value", + target_site=target_site, + ) + + # Make the request + operation = client.create_target_site(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_SiteSearchEngineService_CreateTargetSite_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_delete_target_site_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_delete_target_site_async.py new file mode 100644 index 000000000000..46fbb04af548 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_delete_target_site_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTargetSite +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SiteSearchEngineService_DeleteTargetSite_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_delete_target_site(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.DeleteTargetSiteRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_target_site(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_SiteSearchEngineService_DeleteTargetSite_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_delete_target_site_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_delete_target_site_sync.py new file mode 100644 index 000000000000..7c177adb49ad --- /dev/null +++ 
b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_delete_target_site_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTargetSite +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SiteSearchEngineService_DeleteTargetSite_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_delete_target_site(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.DeleteTargetSiteRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_target_site(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_SiteSearchEngineService_DeleteTargetSite_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_disable_advanced_site_search_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_disable_advanced_site_search_async.py new file mode 100644 index 000000000000..83bb8f52156a --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_disable_advanced_site_search_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. 
DO NOT EDIT! +# +# Snippet for DisableAdvancedSiteSearch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SiteSearchEngineService_DisableAdvancedSiteSearch_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_disable_advanced_site_search(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.DisableAdvancedSiteSearchRequest( + site_search_engine="site_search_engine_value", + ) + + # Make the request + operation = client.disable_advanced_site_search(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_SiteSearchEngineService_DisableAdvancedSiteSearch_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_disable_advanced_site_search_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_disable_advanced_site_search_sync.py new file mode 100644 index 000000000000..b27c498ab4e2 --- /dev/null +++ 
b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_disable_advanced_site_search_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DisableAdvancedSiteSearch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SiteSearchEngineService_DisableAdvancedSiteSearch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_disable_advanced_site_search(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.DisableAdvancedSiteSearchRequest( + site_search_engine="site_search_engine_value", + ) + + # Make the request + operation = client.disable_advanced_site_search(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_SiteSearchEngineService_DisableAdvancedSiteSearch_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_enable_advanced_site_search_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_enable_advanced_site_search_async.py new file mode 100644 index 000000000000..6321a946d101 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_enable_advanced_site_search_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EnableAdvancedSiteSearch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SiteSearchEngineService_EnableAdvancedSiteSearch_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_enable_advanced_site_search(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.EnableAdvancedSiteSearchRequest( + site_search_engine="site_search_engine_value", + ) + + # Make the request + operation = client.enable_advanced_site_search(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_SiteSearchEngineService_EnableAdvancedSiteSearch_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_enable_advanced_site_search_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_enable_advanced_site_search_sync.py new file mode 100644 index 
000000000000..5ac89be68aef --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_enable_advanced_site_search_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EnableAdvancedSiteSearch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SiteSearchEngineService_EnableAdvancedSiteSearch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_enable_advanced_site_search(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.EnableAdvancedSiteSearchRequest( + site_search_engine="site_search_engine_value", + ) + + # Make the request + operation = client.enable_advanced_site_search(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_SiteSearchEngineService_EnableAdvancedSiteSearch_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_fetch_domain_verification_status_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_fetch_domain_verification_status_async.py new file mode 100644 index 000000000000..66b15e1c1eac --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_fetch_domain_verification_status_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchDomainVerificationStatus +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SiteSearchEngineService_FetchDomainVerificationStatus_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_fetch_domain_verification_status(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.FetchDomainVerificationStatusRequest( + site_search_engine="site_search_engine_value", + ) + + # Make the request + page_result = client.fetch_domain_verification_status(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END discoveryengine_v1beta_generated_SiteSearchEngineService_FetchDomainVerificationStatus_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_fetch_domain_verification_status_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_fetch_domain_verification_status_sync.py new file mode 100644 index 
000000000000..45998b72a897 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_fetch_domain_verification_status_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchDomainVerificationStatus +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SiteSearchEngineService_FetchDomainVerificationStatus_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_fetch_domain_verification_status(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.FetchDomainVerificationStatusRequest( + site_search_engine="site_search_engine_value", + ) + + # Make the request + page_result = client.fetch_domain_verification_status(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END discoveryengine_v1beta_generated_SiteSearchEngineService_FetchDomainVerificationStatus_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_get_site_search_engine_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_get_site_search_engine_async.py new file mode 100644 index 000000000000..8497a9e2d6e3 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_get_site_search_engine_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSiteSearchEngine +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SiteSearchEngineService_GetSiteSearchEngine_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_get_site_search_engine(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.GetSiteSearchEngineRequest( + name="name_value", + ) + + # Make the request + response = await client.get_site_search_engine(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_SiteSearchEngineService_GetSiteSearchEngine_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_get_site_search_engine_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_get_site_search_engine_sync.py new file mode 100644 index 000000000000..578cbe5e2e3a --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_get_site_search_engine_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 
Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSiteSearchEngine +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SiteSearchEngineService_GetSiteSearchEngine_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_get_site_search_engine(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.GetSiteSearchEngineRequest( + name="name_value", + ) + + # Make the request + response = client.get_site_search_engine(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_SiteSearchEngineService_GetSiteSearchEngine_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_get_target_site_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_get_target_site_async.py new file mode 100644 index 000000000000..ca90e8dc243b --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_get_target_site_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetTargetSite +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SiteSearchEngineService_GetTargetSite_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_get_target_site(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.GetTargetSiteRequest( + name="name_value", + ) + + # Make the request + response = await client.get_target_site(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_SiteSearchEngineService_GetTargetSite_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_get_target_site_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_get_target_site_sync.py new file mode 100644 index 000000000000..7f9719904390 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_get_target_site_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not 
use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTargetSite +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SiteSearchEngineService_GetTargetSite_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_get_target_site(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.GetTargetSiteRequest( + name="name_value", + ) + + # Make the request + response = client.get_target_site(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_SiteSearchEngineService_GetTargetSite_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_list_target_sites_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_list_target_sites_async.py new file mode 100644 index 000000000000..c0e4a3a7ffc1 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_list_target_sites_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTargetSites +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SiteSearchEngineService_ListTargetSites_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_list_target_sites(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.ListTargetSitesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_target_sites(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END discoveryengine_v1beta_generated_SiteSearchEngineService_ListTargetSites_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_list_target_sites_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_list_target_sites_sync.py new file mode 100644 index 000000000000..9d4cb0152373 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_list_target_sites_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTargetSites +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SiteSearchEngineService_ListTargetSites_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_list_target_sites(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.ListTargetSitesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_target_sites(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END discoveryengine_v1beta_generated_SiteSearchEngineService_ListTargetSites_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_recrawl_uris_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_recrawl_uris_async.py new file mode 100644 index 000000000000..0b63cf49d758 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_recrawl_uris_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for RecrawlUris +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SiteSearchEngineService_RecrawlUris_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_recrawl_uris(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.RecrawlUrisRequest( + site_search_engine="site_search_engine_value", + uris=['uris_value1', 'uris_value2'], + ) + + # Make the request + operation = client.recrawl_uris(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_SiteSearchEngineService_RecrawlUris_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_recrawl_uris_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_recrawl_uris_sync.py new file mode 100644 index 000000000000..237c1f5e859f --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_recrawl_uris_sync.py @@ -0,0 +1,57 @@ +# -*- 
coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RecrawlUris +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SiteSearchEngineService_RecrawlUris_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_recrawl_uris(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.RecrawlUrisRequest( + site_search_engine="site_search_engine_value", + uris=['uris_value1', 'uris_value2'], + ) + + # Make the request + operation = client.recrawl_uris(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_SiteSearchEngineService_RecrawlUris_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_update_target_site_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_update_target_site_async.py new file mode 100644 index 000000000000..12f31975c37d --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_update_target_site_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTargetSite +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SiteSearchEngineService_UpdateTargetSite_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_update_target_site(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient() + + # Initialize request argument(s) + target_site = discoveryengine_v1beta.TargetSite() + target_site.provided_uri_pattern = "provided_uri_pattern_value" + + request = discoveryengine_v1beta.UpdateTargetSiteRequest( + target_site=target_site, + ) + + # Make the request + operation = client.update_target_site(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_SiteSearchEngineService_UpdateTargetSite_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_update_target_site_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_update_target_site_sync.py new file mode 100644 index 000000000000..6d652afb0114 --- /dev/null +++ 
b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_site_search_engine_service_update_target_site_sync.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTargetSite +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_SiteSearchEngineService_UpdateTargetSite_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_update_target_site(): + # Create a client + client = discoveryengine_v1beta.SiteSearchEngineServiceClient() + + # Initialize request argument(s) + target_site = discoveryengine_v1beta.TargetSite() + target_site.provided_uri_pattern = "provided_uri_pattern_value" + + request = discoveryengine_v1beta.UpdateTargetSiteRequest( + target_site=target_site, + ) + + # Make the request + operation = client.update_target_site(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_SiteSearchEngineService_UpdateTargetSite_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json index 271ec68b1f51..61cf4181bdba 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-discoveryengine", - "version": "0.11.7" + "version": "0.1.0" }, "snippets": [ { @@ -169,30 +169,22 @@ "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient", - "shortName": "ConversationalSearchServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.CompletionServiceAsyncClient", + "shortName": "CompletionServiceAsyncClient" }, - "fullName": 
"google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient.converse_conversation", + "fullName": "google.cloud.discoveryengine_v1.CompletionServiceAsyncClient.import_suggestion_deny_list_entries", "method": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.ConverseConversation", + "fullName": "google.cloud.discoveryengine.v1.CompletionService.ImportSuggestionDenyListEntries", "service": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", - "shortName": "ConversationalSearchService" + "fullName": "google.cloud.discoveryengine.v1.CompletionService", + "shortName": "CompletionService" }, - "shortName": "ConverseConversation" + "shortName": "ImportSuggestionDenyListEntries" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.ConverseConversationRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "query", - "type": "google.cloud.discoveryengine_v1.types.TextInput" + "type": "google.cloud.discoveryengine_v1.types.ImportSuggestionDenyListEntriesRequest" }, { "name": "retry", @@ -207,22 +199,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.types.ConverseConversationResponse", - "shortName": "converse_conversation" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "import_suggestion_deny_list_entries" }, - "description": "Sample for ConverseConversation", - "file": "discoveryengine_v1_generated_conversational_search_service_converse_conversation_async.py", + "description": "Sample for ImportSuggestionDenyListEntries", + "file": "discoveryengine_v1_generated_completion_service_import_suggestion_deny_list_entries_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_ConverseConversation_async", + "regionTag": "discoveryengine_v1_generated_CompletionService_ImportSuggestionDenyListEntries_async", 
"segments": [ { - "end": 51, + "end": 60, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 60, "start": 27, "type": "SHORT" }, @@ -232,51 +224,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 57, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 61, + "start": 58, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_conversational_search_service_converse_conversation_async.py" + "title": "discoveryengine_v1_generated_completion_service_import_suggestion_deny_list_entries_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient", - "shortName": "ConversationalSearchServiceClient" + "fullName": "google.cloud.discoveryengine_v1.CompletionServiceClient", + "shortName": "CompletionServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient.converse_conversation", + "fullName": "google.cloud.discoveryengine_v1.CompletionServiceClient.import_suggestion_deny_list_entries", "method": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.ConverseConversation", + "fullName": "google.cloud.discoveryengine.v1.CompletionService.ImportSuggestionDenyListEntries", "service": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", - "shortName": "ConversationalSearchService" + "fullName": "google.cloud.discoveryengine.v1.CompletionService", + "shortName": "CompletionService" }, - "shortName": "ConverseConversation" + "shortName": "ImportSuggestionDenyListEntries" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.ConverseConversationRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "query", - "type": "google.cloud.discoveryengine_v1.types.TextInput" + "type": 
"google.cloud.discoveryengine_v1.types.ImportSuggestionDenyListEntriesRequest" }, { "name": "retry", @@ -291,22 +275,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.types.ConverseConversationResponse", - "shortName": "converse_conversation" + "resultType": "google.api_core.operation.Operation", + "shortName": "import_suggestion_deny_list_entries" }, - "description": "Sample for ConverseConversation", - "file": "discoveryengine_v1_generated_conversational_search_service_converse_conversation_sync.py", + "description": "Sample for ImportSuggestionDenyListEntries", + "file": "discoveryengine_v1_generated_completion_service_import_suggestion_deny_list_entries_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_ConverseConversation_sync", + "regionTag": "discoveryengine_v1_generated_CompletionService_ImportSuggestionDenyListEntries_sync", "segments": [ { - "end": 51, + "end": 60, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 60, "start": 27, "type": "SHORT" }, @@ -316,52 +300,44 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 57, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 61, + "start": 58, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_conversational_search_service_converse_conversation_sync.py" + "title": "discoveryengine_v1_generated_completion_service_import_suggestion_deny_list_entries_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient", - "shortName": "ConversationalSearchServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.CompletionServiceAsyncClient", + "shortName": "CompletionServiceAsyncClient" }, - "fullName": 
"google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient.create_conversation", + "fullName": "google.cloud.discoveryengine_v1.CompletionServiceAsyncClient.purge_suggestion_deny_list_entries", "method": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.CreateConversation", + "fullName": "google.cloud.discoveryengine.v1.CompletionService.PurgeSuggestionDenyListEntries", "service": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", - "shortName": "ConversationalSearchService" + "fullName": "google.cloud.discoveryengine.v1.CompletionService", + "shortName": "CompletionService" }, - "shortName": "CreateConversation" + "shortName": "PurgeSuggestionDenyListEntries" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.CreateConversationRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "conversation", - "type": "google.cloud.discoveryengine_v1.types.Conversation" + "type": "google.cloud.discoveryengine_v1.types.PurgeSuggestionDenyListEntriesRequest" }, { "name": "retry", @@ -376,22 +352,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.types.Conversation", - "shortName": "create_conversation" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "purge_suggestion_deny_list_entries" }, - "description": "Sample for CreateConversation", - "file": "discoveryengine_v1_generated_conversational_search_service_create_conversation_async.py", + "description": "Sample for PurgeSuggestionDenyListEntries", + "file": "discoveryengine_v1_generated_completion_service_purge_suggestion_deny_list_entries_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_CreateConversation_async", + "regionTag": "discoveryengine_v1_generated_CompletionService_PurgeSuggestionDenyListEntries_async", "segments": [ { - "end": 51, + 
"end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -406,46 +382,38 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_conversational_search_service_create_conversation_async.py" + "title": "discoveryengine_v1_generated_completion_service_purge_suggestion_deny_list_entries_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient", - "shortName": "ConversationalSearchServiceClient" + "fullName": "google.cloud.discoveryengine_v1.CompletionServiceClient", + "shortName": "CompletionServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient.create_conversation", + "fullName": "google.cloud.discoveryengine_v1.CompletionServiceClient.purge_suggestion_deny_list_entries", "method": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.CreateConversation", + "fullName": "google.cloud.discoveryengine.v1.CompletionService.PurgeSuggestionDenyListEntries", "service": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", - "shortName": "ConversationalSearchService" + "fullName": "google.cloud.discoveryengine.v1.CompletionService", + "shortName": "CompletionService" }, - "shortName": "CreateConversation" + "shortName": "PurgeSuggestionDenyListEntries" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.CreateConversationRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "conversation", - "type": "google.cloud.discoveryengine_v1.types.Conversation" + "type": "google.cloud.discoveryengine_v1.types.PurgeSuggestionDenyListEntriesRequest" }, { "name": "retry", @@ -460,22 +428,22 @@ "type": "Sequence[Tuple[str, 
str]" } ], - "resultType": "google.cloud.discoveryengine_v1.types.Conversation", - "shortName": "create_conversation" + "resultType": "google.api_core.operation.Operation", + "shortName": "purge_suggestion_deny_list_entries" }, - "description": "Sample for CreateConversation", - "file": "discoveryengine_v1_generated_conversational_search_service_create_conversation_sync.py", + "description": "Sample for PurgeSuggestionDenyListEntries", + "file": "discoveryengine_v1_generated_completion_service_purge_suggestion_deny_list_entries_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_CreateConversation_sync", + "regionTag": "discoveryengine_v1_generated_CompletionService_PurgeSuggestionDenyListEntries_sync", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -490,17 +458,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_conversational_search_service_create_conversation_sync.py" + "title": "discoveryengine_v1_generated_completion_service_purge_suggestion_deny_list_entries_sync.py" }, { "canonical": true, @@ -510,24 +478,28 @@ "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient", "shortName": "ConversationalSearchServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient.delete_conversation", + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient.converse_conversation", "method": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.DeleteConversation", + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.ConverseConversation", "service": { "fullName": 
"google.cloud.discoveryengine.v1.ConversationalSearchService", "shortName": "ConversationalSearchService" }, - "shortName": "DeleteConversation" + "shortName": "ConverseConversation" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.DeleteConversationRequest" + "type": "google.cloud.discoveryengine_v1.types.ConverseConversationRequest" }, { "name": "name", "type": "str" }, + { + "name": "query", + "type": "google.cloud.discoveryengine_v1.types.TextInput" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -541,21 +513,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_conversation" + "resultType": "google.cloud.discoveryengine_v1.types.ConverseConversationResponse", + "shortName": "converse_conversation" }, - "description": "Sample for DeleteConversation", - "file": "discoveryengine_v1_generated_conversational_search_service_delete_conversation_async.py", + "description": "Sample for ConverseConversation", + "file": "discoveryengine_v1_generated_conversational_search_service_converse_conversation_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_DeleteConversation_async", + "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_ConverseConversation_async", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -570,15 +543,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_conversational_search_service_delete_conversation_async.py" + "title": "discoveryengine_v1_generated_conversational_search_service_converse_conversation_async.py" }, { "canonical": true, @@ -587,24 +562,28 @@ "fullName": 
"google.cloud.discoveryengine_v1.ConversationalSearchServiceClient", "shortName": "ConversationalSearchServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient.delete_conversation", + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient.converse_conversation", "method": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.DeleteConversation", + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.ConverseConversation", "service": { "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", "shortName": "ConversationalSearchService" }, - "shortName": "DeleteConversation" + "shortName": "ConverseConversation" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.DeleteConversationRequest" + "type": "google.cloud.discoveryengine_v1.types.ConverseConversationRequest" }, { "name": "name", "type": "str" }, + { + "name": "query", + "type": "google.cloud.discoveryengine_v1.types.TextInput" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -618,21 +597,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_conversation" + "resultType": "google.cloud.discoveryengine_v1.types.ConverseConversationResponse", + "shortName": "converse_conversation" }, - "description": "Sample for DeleteConversation", - "file": "discoveryengine_v1_generated_conversational_search_service_delete_conversation_sync.py", + "description": "Sample for ConverseConversation", + "file": "discoveryengine_v1_generated_conversational_search_service_converse_conversation_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_DeleteConversation_sync", + "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_ConverseConversation_sync", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 
51, "start": 27, "type": "SHORT" }, @@ -647,15 +627,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_conversational_search_service_delete_conversation_sync.py" + "title": "discoveryengine_v1_generated_conversational_search_service_converse_conversation_sync.py" }, { "canonical": true, @@ -665,24 +647,28 @@ "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient", "shortName": "ConversationalSearchServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient.get_conversation", + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient.create_conversation", "method": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.GetConversation", + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.CreateConversation", "service": { "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", "shortName": "ConversationalSearchService" }, - "shortName": "GetConversation" + "shortName": "CreateConversation" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.GetConversationRequest" + "type": "google.cloud.discoveryengine_v1.types.CreateConversationRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, + { + "name": "conversation", + "type": "google.cloud.discoveryengine_v1.types.Conversation" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -697,13 +683,13 @@ } ], "resultType": "google.cloud.discoveryengine_v1.types.Conversation", - "shortName": "get_conversation" + "shortName": "create_conversation" }, - "description": "Sample for GetConversation", - "file": "discoveryengine_v1_generated_conversational_search_service_get_conversation_async.py", + "description": "Sample for 
CreateConversation", + "file": "discoveryengine_v1_generated_conversational_search_service_create_conversation_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_GetConversation_async", + "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_CreateConversation_async", "segments": [ { "end": 51, @@ -736,7 +722,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_conversational_search_service_get_conversation_async.py" + "title": "discoveryengine_v1_generated_conversational_search_service_create_conversation_async.py" }, { "canonical": true, @@ -745,24 +731,28 @@ "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient", "shortName": "ConversationalSearchServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient.get_conversation", + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient.create_conversation", "method": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.GetConversation", + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.CreateConversation", "service": { "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", "shortName": "ConversationalSearchService" }, - "shortName": "GetConversation" + "shortName": "CreateConversation" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.GetConversationRequest" + "type": "google.cloud.discoveryengine_v1.types.CreateConversationRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, + { + "name": "conversation", + "type": "google.cloud.discoveryengine_v1.types.Conversation" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -777,13 +767,13 @@ } ], "resultType": "google.cloud.discoveryengine_v1.types.Conversation", - "shortName": "get_conversation" + "shortName": 
"create_conversation" }, - "description": "Sample for GetConversation", - "file": "discoveryengine_v1_generated_conversational_search_service_get_conversation_sync.py", + "description": "Sample for CreateConversation", + "file": "discoveryengine_v1_generated_conversational_search_service_create_conversation_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_GetConversation_sync", + "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_CreateConversation_sync", "segments": [ { "end": 51, @@ -816,7 +806,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_conversational_search_service_get_conversation_sync.py" + "title": "discoveryengine_v1_generated_conversational_search_service_create_conversation_sync.py" }, { "canonical": true, @@ -826,22 +816,22 @@ "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient", "shortName": "ConversationalSearchServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient.list_conversations", + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient.delete_conversation", "method": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.ListConversations", + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.DeleteConversation", "service": { "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", "shortName": "ConversationalSearchService" }, - "shortName": "ListConversations" + "shortName": "DeleteConversation" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.ListConversationsRequest" + "type": "google.cloud.discoveryengine_v1.types.DeleteConversationRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -857,22 +847,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": 
"google.cloud.discoveryengine_v1.services.conversational_search_service.pagers.ListConversationsAsyncPager", - "shortName": "list_conversations" + "shortName": "delete_conversation" }, - "description": "Sample for ListConversations", - "file": "discoveryengine_v1_generated_conversational_search_service_list_conversations_async.py", + "description": "Sample for DeleteConversation", + "file": "discoveryengine_v1_generated_conversational_search_service_delete_conversation_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_ListConversations_async", + "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_DeleteConversation_async", "segments": [ { - "end": 52, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 49, "start": 27, "type": "SHORT" }, @@ -887,17 +876,15 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_conversational_search_service_list_conversations_async.py" + "title": "discoveryengine_v1_generated_conversational_search_service_delete_conversation_async.py" }, { "canonical": true, @@ -906,22 +893,22 @@ "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient", "shortName": "ConversationalSearchServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient.list_conversations", + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient.delete_conversation", "method": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.ListConversations", + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.DeleteConversation", "service": { "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", "shortName": "ConversationalSearchService" }, - "shortName": 
"ListConversations" + "shortName": "DeleteConversation" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.ListConversationsRequest" + "type": "google.cloud.discoveryengine_v1.types.DeleteConversationRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -937,22 +924,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.services.conversational_search_service.pagers.ListConversationsPager", - "shortName": "list_conversations" + "shortName": "delete_conversation" }, - "description": "Sample for ListConversations", - "file": "discoveryengine_v1_generated_conversational_search_service_list_conversations_sync.py", + "description": "Sample for DeleteConversation", + "file": "discoveryengine_v1_generated_conversational_search_service_delete_conversation_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_ListConversations_sync", + "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_DeleteConversation_sync", "segments": [ { - "end": 52, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 49, "start": 27, "type": "SHORT" }, @@ -967,17 +953,15 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_conversational_search_service_list_conversations_sync.py" + "title": "discoveryengine_v1_generated_conversational_search_service_delete_conversation_sync.py" }, { "canonical": true, @@ -987,27 +971,23 @@ "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient", "shortName": "ConversationalSearchServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient.update_conversation", + "fullName": 
"google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient.get_conversation", "method": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.UpdateConversation", + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.GetConversation", "service": { "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", "shortName": "ConversationalSearchService" }, - "shortName": "UpdateConversation" + "shortName": "GetConversation" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.UpdateConversationRequest" - }, - { - "name": "conversation", - "type": "google.cloud.discoveryengine_v1.types.Conversation" + "type": "google.cloud.discoveryengine_v1.types.GetConversationRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -1023,21 +1003,21 @@ } ], "resultType": "google.cloud.discoveryengine_v1.types.Conversation", - "shortName": "update_conversation" + "shortName": "get_conversation" }, - "description": "Sample for UpdateConversation", - "file": "discoveryengine_v1_generated_conversational_search_service_update_conversation_async.py", + "description": "Sample for GetConversation", + "file": "discoveryengine_v1_generated_conversational_search_service_get_conversation_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_UpdateConversation_async", + "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_GetConversation_async", "segments": [ { - "end": 50, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1047,22 +1027,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - 
"end": 51, - "start": 48, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_conversational_search_service_update_conversation_async.py" + "title": "discoveryengine_v1_generated_conversational_search_service_get_conversation_async.py" }, { "canonical": true, @@ -1071,27 +1051,23 @@ "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient", "shortName": "ConversationalSearchServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient.update_conversation", + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient.get_conversation", "method": { - "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.UpdateConversation", + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.GetConversation", "service": { "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", "shortName": "ConversationalSearchService" }, - "shortName": "UpdateConversation" + "shortName": "GetConversation" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.UpdateConversationRequest" - }, - { - "name": "conversation", - "type": "google.cloud.discoveryengine_v1.types.Conversation" + "type": "google.cloud.discoveryengine_v1.types.GetConversationRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -1107,21 +1083,21 @@ } ], "resultType": "google.cloud.discoveryengine_v1.types.Conversation", - "shortName": "update_conversation" + "shortName": "get_conversation" }, - "description": "Sample for UpdateConversation", - "file": "discoveryengine_v1_generated_conversational_search_service_update_conversation_sync.py", + "description": "Sample for GetConversation", + "file": "discoveryengine_v1_generated_conversational_search_service_get_conversation_sync.py", "language": "PYTHON", "origin": 
"API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_UpdateConversation_sync", + "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_GetConversation_sync", "segments": [ { - "end": 50, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1131,57 +1107,49 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_conversational_search_service_update_conversation_sync.py" + "title": "discoveryengine_v1_generated_conversational_search_service_get_conversation_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", - "shortName": "DocumentServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient", + "shortName": "ConversationalSearchServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.create_document", + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient.list_conversations", "method": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService.CreateDocument", + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.ListConversations", "service": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", + "shortName": "ConversationalSearchService" }, - "shortName": "CreateDocument" + "shortName": "ListConversations" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.CreateDocumentRequest" + 
"type": "google.cloud.discoveryengine_v1.types.ListConversationsRequest" }, { "name": "parent", "type": "str" }, - { - "name": "document", - "type": "google.cloud.discoveryengine_v1.types.Document" - }, - { - "name": "document_id", - "type": "str" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1195,14 +1163,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.types.Document", - "shortName": "create_document" + "resultType": "google.cloud.discoveryengine_v1.services.conversational_search_service.pagers.ListConversationsAsyncPager", + "shortName": "list_conversations" }, - "description": "Sample for CreateDocument", - "file": "discoveryengine_v1_generated_document_service_create_document_async.py", + "description": "Sample for ListConversations", + "file": "discoveryengine_v1_generated_conversational_search_service_list_conversations_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_DocumentService_CreateDocument_async", + "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_ListConversations_async", "segments": [ { "end": 52, @@ -1220,56 +1188,48 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 50, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_document_service_create_document_async.py" + "title": "discoveryengine_v1_generated_conversational_search_service_list_conversations_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient", - "shortName": "DocumentServiceClient" + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient", + "shortName": "ConversationalSearchServiceClient" }, - "fullName": 
"google.cloud.discoveryengine_v1.DocumentServiceClient.create_document", + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient.list_conversations", "method": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService.CreateDocument", + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.ListConversations", "service": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", + "shortName": "ConversationalSearchService" }, - "shortName": "CreateDocument" + "shortName": "ListConversations" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.CreateDocumentRequest" + "type": "google.cloud.discoveryengine_v1.types.ListConversationsRequest" }, { "name": "parent", "type": "str" }, - { - "name": "document", - "type": "google.cloud.discoveryengine_v1.types.Document" - }, - { - "name": "document_id", - "type": "str" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1283,14 +1243,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.types.Document", - "shortName": "create_document" + "resultType": "google.cloud.discoveryengine_v1.services.conversational_search_service.pagers.ListConversationsPager", + "shortName": "list_conversations" }, - "description": "Sample for CreateDocument", - "file": "discoveryengine_v1_generated_document_service_create_document_sync.py", + "description": "Sample for ListConversations", + "file": "discoveryengine_v1_generated_conversational_search_service_list_conversations_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_DocumentService_CreateDocument_sync", + "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_ListConversations_sync", "segments": [ { "end": 52, @@ -1308,48 +1268,52 @@ "type": 
"CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 50, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_document_service_create_document_sync.py" + "title": "discoveryengine_v1_generated_conversational_search_service_list_conversations_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", - "shortName": "DocumentServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient", + "shortName": "ConversationalSearchServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.delete_document", + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceAsyncClient.update_conversation", "method": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService.DeleteDocument", + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.UpdateConversation", "service": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", + "shortName": "ConversationalSearchService" }, - "shortName": "DeleteDocument" + "shortName": "UpdateConversation" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.DeleteDocumentRequest" + "type": "google.cloud.discoveryengine_v1.types.UpdateConversationRequest" }, { - "name": "name", - "type": "str" + "name": "conversation", + "type": "google.cloud.discoveryengine_v1.types.Conversation" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -1364,21 +1328,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": 
"delete_document" + "resultType": "google.cloud.discoveryengine_v1.types.Conversation", + "shortName": "update_conversation" }, - "description": "Sample for DeleteDocument", - "file": "discoveryengine_v1_generated_document_service_delete_document_async.py", + "description": "Sample for UpdateConversation", + "file": "discoveryengine_v1_generated_conversational_search_service_update_conversation_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_DocumentService_DeleteDocument_async", + "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_UpdateConversation_async", "segments": [ { - "end": 49, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 50, "start": 27, "type": "SHORT" }, @@ -1388,44 +1353,139 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 46, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_conversational_search_service_update_conversation_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient", + "shortName": "ConversationalSearchServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.ConversationalSearchServiceClient.update_conversation", + "method": { + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService.UpdateConversation", + "service": { + "fullName": "google.cloud.discoveryengine.v1.ConversationalSearchService", + "shortName": "ConversationalSearchService" + }, + "shortName": "UpdateConversation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.UpdateConversationRequest" + }, + { + "name": "conversation", + "type": "google.cloud.discoveryengine_v1.types.Conversation" + }, + { + "name": 
"update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.Conversation", + "shortName": "update_conversation" + }, + "description": "Sample for UpdateConversation", + "file": "discoveryengine_v1_generated_conversational_search_service_update_conversation_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_ConversationalSearchService_UpdateConversation_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, { "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_document_service_delete_document_async.py" + "title": "discoveryengine_v1_generated_conversational_search_service_update_conversation_sync.py" }, { "canonical": true, "clientMethod": { + "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient", - "shortName": "DocumentServiceClient" + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceAsyncClient", + "shortName": "DataStoreServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient.delete_document", + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceAsyncClient.create_data_store", "method": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService.DeleteDocument", + "fullName": "google.cloud.discoveryengine.v1.DataStoreService.CreateDataStore", "service": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService", - 
"shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1.DataStoreService", + "shortName": "DataStoreService" }, - "shortName": "DeleteDocument" + "shortName": "CreateDataStore" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.DeleteDocumentRequest" + "type": "google.cloud.discoveryengine_v1.types.CreateDataStoreRequest" }, { - "name": "name", + "name": "parent", + "type": "str" + }, + { + "name": "data_store", + "type": "google.cloud.discoveryengine_v1.types.DataStore" + }, + { + "name": "data_store_id", "type": "str" }, { @@ -1441,21 +1501,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_document" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_data_store" }, - "description": "Sample for DeleteDocument", - "file": "discoveryengine_v1_generated_document_service_delete_document_sync.py", + "description": "Sample for CreateDataStore", + "file": "discoveryengine_v1_generated_data_store_service_create_data_store_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_DocumentService_DeleteDocument_sync", + "regionTag": "discoveryengine_v1_generated_DataStoreService_CreateDataStore_async", "segments": [ { - "end": 49, + "end": 60, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 60, "start": 27, "type": "SHORT" }, @@ -1465,45 +1526,54 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 46, + "end": 57, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 61, + "start": 58, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_document_service_delete_document_sync.py" + "title": "discoveryengine_v1_generated_data_store_service_create_data_store_async.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": 
"google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", - "shortName": "DocumentServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceClient", + "shortName": "DataStoreServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.get_document", + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceClient.create_data_store", "method": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService.GetDocument", + "fullName": "google.cloud.discoveryengine.v1.DataStoreService.CreateDataStore", "service": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1.DataStoreService", + "shortName": "DataStoreService" }, - "shortName": "GetDocument" + "shortName": "CreateDataStore" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.GetDocumentRequest" + "type": "google.cloud.discoveryengine_v1.types.CreateDataStoreRequest" }, { - "name": "name", + "name": "parent", + "type": "str" + }, + { + "name": "data_store", + "type": "google.cloud.discoveryengine_v1.types.DataStore" + }, + { + "name": "data_store_id", "type": "str" }, { @@ -1519,22 +1589,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.types.Document", - "shortName": "get_document" + "resultType": "google.api_core.operation.Operation", + "shortName": "create_data_store" }, - "description": "Sample for GetDocument", - "file": "discoveryengine_v1_generated_document_service_get_document_async.py", + "description": "Sample for CreateDataStore", + "file": "discoveryengine_v1_generated_data_store_service_create_data_store_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_DocumentService_GetDocument_async", + "regionTag": "discoveryengine_v1_generated_DataStoreService_CreateDataStore_sync", "segments": [ { - "end": 51, + 
"end": 60, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 60, "start": 27, "type": "SHORT" }, @@ -1544,43 +1614,44 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 57, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 61, + "start": 58, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_document_service_get_document_async.py" + "title": "discoveryengine_v1_generated_data_store_service_create_data_store_sync.py" }, { "canonical": true, "clientMethod": { + "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient", - "shortName": "DocumentServiceClient" + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceAsyncClient", + "shortName": "DataStoreServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient.get_document", + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceAsyncClient.delete_data_store", "method": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService.GetDocument", + "fullName": "google.cloud.discoveryengine.v1.DataStoreService.DeleteDataStore", "service": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1.DataStoreService", + "shortName": "DataStoreService" }, - "shortName": "GetDocument" + "shortName": "DeleteDataStore" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.GetDocumentRequest" + "type": "google.cloud.discoveryengine_v1.types.DeleteDataStoreRequest" }, { "name": "name", @@ -1599,22 +1670,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.types.Document", - "shortName": "get_document" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_data_store" }, - 
"description": "Sample for GetDocument", - "file": "discoveryengine_v1_generated_document_service_get_document_sync.py", + "description": "Sample for DeleteDataStore", + "file": "discoveryengine_v1_generated_data_store_service_delete_data_store_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_DocumentService_GetDocument_sync", + "regionTag": "discoveryengine_v1_generated_DataStoreService_DeleteDataStore_async", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1629,39 +1700,3832 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_document_service_get_document_sync.py" + "title": "discoveryengine_v1_generated_data_store_service_delete_data_store_async.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", - "shortName": "DocumentServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceClient", + "shortName": "DataStoreServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.import_documents", + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceClient.delete_data_store", "method": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService.ImportDocuments", + "fullName": "google.cloud.discoveryengine.v1.DataStoreService.DeleteDataStore", "service": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1.DataStoreService", + "shortName": "DataStoreService" + }, + "shortName": "DeleteDataStore" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.discoveryengine_v1.types.DeleteDataStoreRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_data_store" + }, + "description": "Sample for DeleteDataStore", + "file": "discoveryengine_v1_generated_data_store_service_delete_data_store_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DataStoreService_DeleteDataStore_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_data_store_service_delete_data_store_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceAsyncClient", + "shortName": "DataStoreServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceAsyncClient.get_data_store", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DataStoreService.GetDataStore", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DataStoreService", + "shortName": "DataStoreService" + }, + "shortName": "GetDataStore" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.GetDataStoreRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.DataStore", + "shortName": "get_data_store" + }, + "description": "Sample for GetDataStore", + "file": "discoveryengine_v1_generated_data_store_service_get_data_store_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DataStoreService_GetDataStore_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_data_store_service_get_data_store_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceClient", + "shortName": "DataStoreServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceClient.get_data_store", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DataStoreService.GetDataStore", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DataStoreService", + "shortName": "DataStoreService" + }, + "shortName": "GetDataStore" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.GetDataStoreRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.DataStore", + "shortName": "get_data_store" + }, + "description": "Sample for GetDataStore", + "file": 
"discoveryengine_v1_generated_data_store_service_get_data_store_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DataStoreService_GetDataStore_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_data_store_service_get_data_store_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceAsyncClient", + "shortName": "DataStoreServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceAsyncClient.list_data_stores", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DataStoreService.ListDataStores", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DataStoreService", + "shortName": "DataStoreService" + }, + "shortName": "ListDataStores" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.ListDataStoresRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.services.data_store_service.pagers.ListDataStoresAsyncPager", + "shortName": "list_data_stores" + }, + "description": "Sample for ListDataStores", + "file": "discoveryengine_v1_generated_data_store_service_list_data_stores_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"discoveryengine_v1_generated_DataStoreService_ListDataStores_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_data_store_service_list_data_stores_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceClient", + "shortName": "DataStoreServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceClient.list_data_stores", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DataStoreService.ListDataStores", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DataStoreService", + "shortName": "DataStoreService" + }, + "shortName": "ListDataStores" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.ListDataStoresRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.services.data_store_service.pagers.ListDataStoresPager", + "shortName": "list_data_stores" + }, + "description": "Sample for ListDataStores", + "file": "discoveryengine_v1_generated_data_store_service_list_data_stores_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DataStoreService_ListDataStores_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + 
"start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_data_store_service_list_data_stores_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceAsyncClient", + "shortName": "DataStoreServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceAsyncClient.update_data_store", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DataStoreService.UpdateDataStore", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DataStoreService", + "shortName": "DataStoreService" + }, + "shortName": "UpdateDataStore" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.UpdateDataStoreRequest" + }, + { + "name": "data_store", + "type": "google.cloud.discoveryengine_v1.types.DataStore" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.DataStore", + "shortName": "update_data_store" + }, + "description": "Sample for UpdateDataStore", + "file": "discoveryengine_v1_generated_data_store_service_update_data_store_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DataStoreService_UpdateDataStore_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 
41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_data_store_service_update_data_store_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceClient", + "shortName": "DataStoreServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DataStoreServiceClient.update_data_store", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DataStoreService.UpdateDataStore", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DataStoreService", + "shortName": "DataStoreService" + }, + "shortName": "UpdateDataStore" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.UpdateDataStoreRequest" + }, + { + "name": "data_store", + "type": "google.cloud.discoveryengine_v1.types.DataStore" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.DataStore", + "shortName": "update_data_store" + }, + "description": "Sample for UpdateDataStore", + "file": "discoveryengine_v1_generated_data_store_service_update_data_store_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DataStoreService_UpdateDataStore_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, 
+ { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_data_store_service_update_data_store_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", + "shortName": "DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.create_document", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.CreateDocument", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "CreateDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.CreateDocumentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "document", + "type": "google.cloud.discoveryengine_v1.types.Document" + }, + { + "name": "document_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.Document", + "shortName": "create_document" + }, + "description": "Sample for CreateDocument", + "file": "discoveryengine_v1_generated_document_service_create_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_CreateDocument_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"discoveryengine_v1_generated_document_service_create_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient.create_document", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.CreateDocument", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "CreateDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.CreateDocumentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "document", + "type": "google.cloud.discoveryengine_v1.types.Document" + }, + { + "name": "document_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.Document", + "shortName": "create_document" + }, + "description": "Sample for CreateDocument", + "file": "discoveryengine_v1_generated_document_service_create_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_CreateDocument_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_create_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + 
"async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", + "shortName": "DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.delete_document", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.DeleteDocument", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "DeleteDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.DeleteDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_document" + }, + "description": "Sample for DeleteDocument", + "file": "discoveryengine_v1_generated_document_service_delete_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_DeleteDocument_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_delete_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient.delete_document", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.DeleteDocument", + "service": { + 
"fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "DeleteDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.DeleteDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_document" + }, + "description": "Sample for DeleteDocument", + "file": "discoveryengine_v1_generated_document_service_delete_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_DeleteDocument_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_delete_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", + "shortName": "DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.get_document", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.GetDocument", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "GetDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.GetDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.Document", + "shortName": "get_document" + }, + "description": "Sample for GetDocument", + "file": "discoveryengine_v1_generated_document_service_get_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_GetDocument_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_get_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient.get_document", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.GetDocument", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "GetDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.GetDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.Document", + "shortName": "get_document" + }, + "description": "Sample for GetDocument", 
+ "file": "discoveryengine_v1_generated_document_service_get_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_GetDocument_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_get_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", + "shortName": "DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.import_documents", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.ImportDocuments", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "ImportDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.ImportDocumentsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "import_documents" + }, + "description": "Sample for ImportDocuments", + "file": "discoveryengine_v1_generated_document_service_import_documents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_ImportDocuments_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" 
+ }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_import_documents_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient.import_documents", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.ImportDocuments", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "ImportDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.ImportDocumentsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "import_documents" + }, + "description": "Sample for ImportDocuments", + "file": "discoveryengine_v1_generated_document_service_import_documents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_ImportDocuments_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_import_documents_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", + "shortName": "DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.list_documents", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.ListDocuments", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "ListDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.ListDocumentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.services.document_service.pagers.ListDocumentsAsyncPager", + "shortName": "list_documents" + }, + "description": "Sample for ListDocuments", + "file": "discoveryengine_v1_generated_document_service_list_documents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_ListDocuments_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_list_documents_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.discoveryengine_v1.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient.list_documents", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.ListDocuments", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "ListDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.ListDocumentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.services.document_service.pagers.ListDocumentsPager", + "shortName": "list_documents" + }, + "description": "Sample for ListDocuments", + "file": "discoveryengine_v1_generated_document_service_list_documents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_ListDocuments_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_list_documents_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", + "shortName": "DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.purge_documents", + "method": { + 
"fullName": "google.cloud.discoveryengine.v1.DocumentService.PurgeDocuments", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "PurgeDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.PurgeDocumentsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "purge_documents" + }, + "description": "Sample for PurgeDocuments", + "file": "discoveryengine_v1_generated_document_service_purge_documents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_PurgeDocuments_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_purge_documents_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient.purge_documents", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.PurgeDocuments", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "PurgeDocuments" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.discoveryengine_v1.types.PurgeDocumentsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "purge_documents" + }, + "description": "Sample for PurgeDocuments", + "file": "discoveryengine_v1_generated_document_service_purge_documents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_PurgeDocuments_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_purge_documents_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", + "shortName": "DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.update_document", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.UpdateDocument", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "UpdateDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.UpdateDocumentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.discoveryengine_v1.types.Document", + "shortName": "update_document" + }, + "description": "Sample for UpdateDocument", + "file": "discoveryengine_v1_generated_document_service_update_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_UpdateDocument_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_update_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient.update_document", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.UpdateDocument", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "UpdateDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.UpdateDocumentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.Document", + "shortName": "update_document" + }, + "description": "Sample for UpdateDocument", + "file": "discoveryengine_v1_generated_document_service_update_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"discoveryengine_v1_generated_DocumentService_UpdateDocument_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_update_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.EngineServiceAsyncClient", + "shortName": "EngineServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.EngineServiceAsyncClient.create_engine", + "method": { + "fullName": "google.cloud.discoveryengine.v1.EngineService.CreateEngine", + "service": { + "fullName": "google.cloud.discoveryengine.v1.EngineService", + "shortName": "EngineService" + }, + "shortName": "CreateEngine" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.CreateEngineRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "engine", + "type": "google.cloud.discoveryengine_v1.types.Engine" + }, + { + "name": "engine_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_engine" + }, + "description": "Sample for CreateEngine", + "file": "discoveryengine_v1_generated_engine_service_create_engine_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_EngineService_CreateEngine_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + 
}, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_engine_service_create_engine_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.EngineServiceClient", + "shortName": "EngineServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.EngineServiceClient.create_engine", + "method": { + "fullName": "google.cloud.discoveryengine.v1.EngineService.CreateEngine", + "service": { + "fullName": "google.cloud.discoveryengine.v1.EngineService", + "shortName": "EngineService" + }, + "shortName": "CreateEngine" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.CreateEngineRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "engine", + "type": "google.cloud.discoveryengine_v1.types.Engine" + }, + { + "name": "engine_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_engine" + }, + "description": "Sample for CreateEngine", + "file": "discoveryengine_v1_generated_engine_service_create_engine_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_EngineService_CreateEngine_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_engine_service_create_engine_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.EngineServiceAsyncClient", + "shortName": "EngineServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.EngineServiceAsyncClient.delete_engine", + "method": { + "fullName": "google.cloud.discoveryengine.v1.EngineService.DeleteEngine", + "service": { + "fullName": "google.cloud.discoveryengine.v1.EngineService", + "shortName": "EngineService" + }, + "shortName": "DeleteEngine" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.DeleteEngineRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_engine" + }, + "description": "Sample for DeleteEngine", + "file": "discoveryengine_v1_generated_engine_service_delete_engine_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_EngineService_DeleteEngine_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_engine_service_delete_engine_async.py" + }, + { + "canonical": 
true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.EngineServiceClient", + "shortName": "EngineServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.EngineServiceClient.delete_engine", + "method": { + "fullName": "google.cloud.discoveryengine.v1.EngineService.DeleteEngine", + "service": { + "fullName": "google.cloud.discoveryengine.v1.EngineService", + "shortName": "EngineService" + }, + "shortName": "DeleteEngine" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.DeleteEngineRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_engine" + }, + "description": "Sample for DeleteEngine", + "file": "discoveryengine_v1_generated_engine_service_delete_engine_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_EngineService_DeleteEngine_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_engine_service_delete_engine_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.EngineServiceAsyncClient", + "shortName": "EngineServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.EngineServiceAsyncClient.get_engine", + "method": { + "fullName": 
"google.cloud.discoveryengine.v1.EngineService.GetEngine", + "service": { + "fullName": "google.cloud.discoveryengine.v1.EngineService", + "shortName": "EngineService" + }, + "shortName": "GetEngine" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.GetEngineRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.Engine", + "shortName": "get_engine" + }, + "description": "Sample for GetEngine", + "file": "discoveryengine_v1_generated_engine_service_get_engine_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_EngineService_GetEngine_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_engine_service_get_engine_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.EngineServiceClient", + "shortName": "EngineServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.EngineServiceClient.get_engine", + "method": { + "fullName": "google.cloud.discoveryengine.v1.EngineService.GetEngine", + "service": { + "fullName": "google.cloud.discoveryengine.v1.EngineService", + "shortName": "EngineService" + }, + "shortName": "GetEngine" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.GetEngineRequest" + }, + { + "name": 
"name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.Engine", + "shortName": "get_engine" + }, + "description": "Sample for GetEngine", + "file": "discoveryengine_v1_generated_engine_service_get_engine_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_EngineService_GetEngine_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_engine_service_get_engine_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.EngineServiceAsyncClient", + "shortName": "EngineServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.EngineServiceAsyncClient.list_engines", + "method": { + "fullName": "google.cloud.discoveryengine.v1.EngineService.ListEngines", + "service": { + "fullName": "google.cloud.discoveryengine.v1.EngineService", + "shortName": "EngineService" + }, + "shortName": "ListEngines" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.ListEnginesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.discoveryengine_v1.services.engine_service.pagers.ListEnginesAsyncPager", + "shortName": "list_engines" + }, + "description": "Sample for ListEngines", + "file": "discoveryengine_v1_generated_engine_service_list_engines_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_EngineService_ListEngines_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_engine_service_list_engines_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.EngineServiceClient", + "shortName": "EngineServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.EngineServiceClient.list_engines", + "method": { + "fullName": "google.cloud.discoveryengine.v1.EngineService.ListEngines", + "service": { + "fullName": "google.cloud.discoveryengine.v1.EngineService", + "shortName": "EngineService" + }, + "shortName": "ListEngines" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.ListEnginesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.services.engine_service.pagers.ListEnginesPager", + "shortName": "list_engines" + }, + "description": "Sample for ListEngines", + "file": "discoveryengine_v1_generated_engine_service_list_engines_sync.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_EngineService_ListEngines_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_engine_service_list_engines_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.EngineServiceAsyncClient", + "shortName": "EngineServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.EngineServiceAsyncClient.update_engine", + "method": { + "fullName": "google.cloud.discoveryengine.v1.EngineService.UpdateEngine", + "service": { + "fullName": "google.cloud.discoveryengine.v1.EngineService", + "shortName": "EngineService" + }, + "shortName": "UpdateEngine" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.UpdateEngineRequest" + }, + { + "name": "engine", + "type": "google.cloud.discoveryengine_v1.types.Engine" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.Engine", + "shortName": "update_engine" + }, + "description": "Sample for UpdateEngine", + "file": "discoveryengine_v1_generated_engine_service_update_engine_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_EngineService_UpdateEngine_async", + "segments": [ + { + "end": 55, + "start": 27, + 
"type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_engine_service_update_engine_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.EngineServiceClient", + "shortName": "EngineServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.EngineServiceClient.update_engine", + "method": { + "fullName": "google.cloud.discoveryengine.v1.EngineService.UpdateEngine", + "service": { + "fullName": "google.cloud.discoveryengine.v1.EngineService", + "shortName": "EngineService" + }, + "shortName": "UpdateEngine" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.UpdateEngineRequest" + }, + { + "name": "engine", + "type": "google.cloud.discoveryengine_v1.types.Engine" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.Engine", + "shortName": "update_engine" + }, + "description": "Sample for UpdateEngine", + "file": "discoveryengine_v1_generated_engine_service_update_engine_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_EngineService_UpdateEngine_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + 
"type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_engine_service_update_engine_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceAsyncClient.create_schema", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService.CreateSchema", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "CreateSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.CreateSchemaRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "schema", + "type": "google.cloud.discoveryengine_v1.types.Schema" + }, + { + "name": "schema_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_schema" + }, + "description": "Sample for CreateSchema", + "file": "discoveryengine_v1_generated_schema_service_create_schema_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SchemaService_CreateSchema_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + 
"type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_schema_service_create_schema_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceClient.create_schema", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService.CreateSchema", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "CreateSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.CreateSchemaRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "schema", + "type": "google.cloud.discoveryengine_v1.types.Schema" + }, + { + "name": "schema_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_schema" + }, + "description": "Sample for CreateSchema", + "file": "discoveryengine_v1_generated_schema_service_create_schema_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SchemaService_CreateSchema_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_schema_service_create_schema_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + 
"client": { + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceAsyncClient.delete_schema", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService.DeleteSchema", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "DeleteSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.DeleteSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_schema" + }, + "description": "Sample for DeleteSchema", + "file": "discoveryengine_v1_generated_schema_service_delete_schema_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SchemaService_DeleteSchema_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_schema_service_delete_schema_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceClient.delete_schema", + "method": { + "fullName": 
"google.cloud.discoveryengine.v1.SchemaService.DeleteSchema", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "DeleteSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.DeleteSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_schema" + }, + "description": "Sample for DeleteSchema", + "file": "discoveryengine_v1_generated_schema_service_delete_schema_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SchemaService_DeleteSchema_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_schema_service_delete_schema_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceAsyncClient.get_schema", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService.GetSchema", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "GetSchema" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.discoveryengine_v1.types.GetSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.Schema", + "shortName": "get_schema" + }, + "description": "Sample for GetSchema", + "file": "discoveryengine_v1_generated_schema_service_get_schema_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SchemaService_GetSchema_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_schema_service_get_schema_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceClient.get_schema", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService.GetSchema", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "GetSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.GetSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.discoveryengine_v1.types.Schema", + "shortName": "get_schema" + }, + "description": "Sample for GetSchema", + "file": "discoveryengine_v1_generated_schema_service_get_schema_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SchemaService_GetSchema_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_schema_service_get_schema_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceAsyncClient.list_schemas", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService.ListSchemas", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "ListSchemas" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.ListSchemasRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.services.schema_service.pagers.ListSchemasAsyncPager", + "shortName": "list_schemas" + }, + "description": "Sample for ListSchemas", + "file": "discoveryengine_v1_generated_schema_service_list_schemas_async.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SchemaService_ListSchemas_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_schema_service_list_schemas_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceClient.list_schemas", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService.ListSchemas", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "ListSchemas" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.ListSchemasRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.services.schema_service.pagers.ListSchemasPager", + "shortName": "list_schemas" + }, + "description": "Sample for ListSchemas", + "file": "discoveryengine_v1_generated_schema_service_list_schemas_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SchemaService_ListSchemas_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_schema_service_list_schemas_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceAsyncClient.update_schema", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService.UpdateSchema", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "UpdateSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.UpdateSchemaRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_schema" + }, + "description": "Sample for UpdateSchema", + "file": "discoveryengine_v1_generated_schema_service_update_schema_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SchemaService_UpdateSchema_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_schema_service_update_schema_async.py" + 
}, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SchemaServiceClient.update_schema", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService.UpdateSchema", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "UpdateSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.UpdateSchemaRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_schema" + }, + "description": "Sample for UpdateSchema", + "file": "discoveryengine_v1_generated_schema_service_update_schema_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SchemaService_UpdateSchema_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_schema_service_update_schema_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.SearchServiceAsyncClient", + "shortName": "SearchServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SearchServiceAsyncClient.search", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SearchService.Search", 
+ "service": { + "fullName": "google.cloud.discoveryengine.v1.SearchService", + "shortName": "SearchService" + }, + "shortName": "Search" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.SearchRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.services.search_service.pagers.SearchAsyncPager", + "shortName": "search" + }, + "description": "Sample for Search", + "file": "discoveryengine_v1_generated_search_service_search_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SearchService_Search_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_search_service_search_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.SearchServiceClient", + "shortName": "SearchServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SearchServiceClient.search", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SearchService.Search", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SearchService", + "shortName": "SearchService" + }, + "shortName": "Search" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.SearchRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { 
+ "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.services.search_service.pagers.SearchPager", + "shortName": "search" + }, + "description": "Sample for Search", + "file": "discoveryengine_v1_generated_search_service_search_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SearchService_Search_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_search_service_search_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient.batch_create_target_sites", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchCreateTargetSites", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "BatchCreateTargetSites" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.BatchCreateTargetSitesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "batch_create_target_sites" + }, + "description": "Sample for 
BatchCreateTargetSites", + "file": "discoveryengine_v1_generated_site_search_engine_service_batch_create_target_sites_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_BatchCreateTargetSites_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_site_search_engine_service_batch_create_target_sites_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient.batch_create_target_sites", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchCreateTargetSites", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "BatchCreateTargetSites" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.BatchCreateTargetSitesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "batch_create_target_sites" + }, + "description": "Sample for BatchCreateTargetSites", + "file": "discoveryengine_v1_generated_site_search_engine_service_batch_create_target_sites_sync.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_BatchCreateTargetSites_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_site_search_engine_service_batch_create_target_sites_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient.batch_verify_target_sites", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchVerifyTargetSites", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "BatchVerifyTargetSites" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.BatchVerifyTargetSitesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "batch_verify_target_sites" + }, + "description": "Sample for BatchVerifyTargetSites", + "file": "discoveryengine_v1_generated_site_search_engine_service_batch_verify_target_sites_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_BatchVerifyTargetSites_async", + 
"segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_site_search_engine_service_batch_verify_target_sites_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient.batch_verify_target_sites", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchVerifyTargetSites", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "BatchVerifyTargetSites" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.BatchVerifyTargetSitesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "batch_verify_target_sites" + }, + "description": "Sample for BatchVerifyTargetSites", + "file": "discoveryengine_v1_generated_site_search_engine_service_batch_verify_target_sites_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_BatchVerifyTargetSites_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_site_search_engine_service_batch_verify_target_sites_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient.create_target_site", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.CreateTargetSite", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "CreateTargetSite" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.CreateTargetSiteRequest" + }, + { + "name": "parent", + "type": "str" }, - "shortName": "ImportDocuments" - }, - "parameters": [ { - "name": "request", - "type": "google.cloud.discoveryengine_v1.types.ImportDocumentsRequest" + "name": "target_site", + "type": "google.cloud.discoveryengine_v1.types.TargetSite" }, { "name": "retry", @@ -1677,21 +5541,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "import_documents" + "shortName": "create_target_site" }, - "description": "Sample for ImportDocuments", - "file": "discoveryengine_v1_generated_document_service_import_documents_async.py", + "description": "Sample for CreateTargetSite", + "file": "discoveryengine_v1_generated_site_search_engine_service_create_target_site_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_DocumentService_ImportDocuments_async", + "regionTag": 
"discoveryengine_v1_generated_SiteSearchEngineService_CreateTargetSite_async", "segments": [ { - "end": 55, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 59, "start": 27, "type": "SHORT" }, @@ -1701,43 +5565,51 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_document_service_import_documents_async.py" + "title": "discoveryengine_v1_generated_site_search_engine_service_create_target_site_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient", - "shortName": "DocumentServiceClient" + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient.import_documents", + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient.create_target_site", "method": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService.ImportDocuments", + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.CreateTargetSite", "service": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "ImportDocuments" + "shortName": "CreateTargetSite" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.ImportDocumentsRequest" + "type": "google.cloud.discoveryengine_v1.types.CreateTargetSiteRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "target_site", + "type": "google.cloud.discoveryengine_v1.types.TargetSite" }, { 
"name": "retry", @@ -1753,21 +5625,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "import_documents" + "shortName": "create_target_site" }, - "description": "Sample for ImportDocuments", - "file": "discoveryengine_v1_generated_document_service_import_documents_sync.py", + "description": "Sample for CreateTargetSite", + "file": "discoveryengine_v1_generated_site_search_engine_service_create_target_site_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_DocumentService_ImportDocuments_sync", + "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_CreateTargetSite_sync", "segments": [ { - "end": 55, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 59, "start": 27, "type": "SHORT" }, @@ -1777,47 +5649,47 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_document_service_import_documents_sync.py" + "title": "discoveryengine_v1_generated_site_search_engine_service_create_target_site_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", - "shortName": "DocumentServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.list_documents", + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient.delete_target_site", "method": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService.ListDocuments", + "fullName": 
"google.cloud.discoveryengine.v1.SiteSearchEngineService.DeleteTargetSite", "service": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "ListDocuments" + "shortName": "DeleteTargetSite" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.ListDocumentsRequest" + "type": "google.cloud.discoveryengine_v1.types.DeleteTargetSiteRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -1833,22 +5705,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.services.document_service.pagers.ListDocumentsAsyncPager", - "shortName": "list_documents" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_target_site" }, - "description": "Sample for ListDocuments", - "file": "discoveryengine_v1_generated_document_service_list_documents_async.py", + "description": "Sample for DeleteTargetSite", + "file": "discoveryengine_v1_generated_site_search_engine_service_delete_target_site_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_DocumentService_ListDocuments_async", + "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_DeleteTargetSite_async", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1863,41 +5735,41 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_document_service_list_documents_async.py" + "title": "discoveryengine_v1_generated_site_search_engine_service_delete_target_site_async.py" }, { "canonical": true, 
"clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient", - "shortName": "DocumentServiceClient" + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient.list_documents", + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient.delete_target_site", "method": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService.ListDocuments", + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.DeleteTargetSite", "service": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "ListDocuments" + "shortName": "DeleteTargetSite" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.ListDocumentsRequest" + "type": "google.cloud.discoveryengine_v1.types.DeleteTargetSiteRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -1913,22 +5785,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.services.document_service.pagers.ListDocumentsPager", - "shortName": "list_documents" + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_target_site" }, - "description": "Sample for ListDocuments", - "file": "discoveryengine_v1_generated_document_service_list_documents_sync.py", + "description": "Sample for DeleteTargetSite", + "file": "discoveryengine_v1_generated_site_search_engine_service_delete_target_site_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_DocumentService_ListDocuments_sync", + "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_DeleteTargetSite_sync", "segments": [ { - "end": 52, + 
"end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1943,39 +5815,39 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_document_service_list_documents_sync.py" + "title": "discoveryengine_v1_generated_site_search_engine_service_delete_target_site_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", - "shortName": "DocumentServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.purge_documents", + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient.disable_advanced_site_search", "method": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService.PurgeDocuments", + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.DisableAdvancedSiteSearch", "service": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "PurgeDocuments" + "shortName": "DisableAdvancedSiteSearch" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.PurgeDocumentsRequest" + "type": "google.cloud.discoveryengine_v1.types.DisableAdvancedSiteSearchRequest" }, { "name": "retry", @@ -1991,21 +5863,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "purge_documents" + "shortName": "disable_advanced_site_search" }, - "description": "Sample for PurgeDocuments", - "file": 
"discoveryengine_v1_generated_document_service_purge_documents_async.py", + "description": "Sample for DisableAdvancedSiteSearch", + "file": "discoveryengine_v1_generated_site_search_engine_service_disable_advanced_site_search_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_DocumentService_PurgeDocuments_async", + "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_DisableAdvancedSiteSearch_async", "segments": [ { - "end": 56, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 55, "start": 27, "type": "SHORT" }, @@ -2015,43 +5887,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_document_service_purge_documents_async.py" + "title": "discoveryengine_v1_generated_site_search_engine_service_disable_advanced_site_search_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient", - "shortName": "DocumentServiceClient" + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient.purge_documents", + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient.disable_advanced_site_search", "method": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService.PurgeDocuments", + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.DisableAdvancedSiteSearch", "service": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", + "shortName": 
"SiteSearchEngineService" }, - "shortName": "PurgeDocuments" + "shortName": "DisableAdvancedSiteSearch" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.PurgeDocumentsRequest" + "type": "google.cloud.discoveryengine_v1.types.DisableAdvancedSiteSearchRequest" }, { "name": "retry", @@ -2067,21 +5939,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "purge_documents" + "shortName": "disable_advanced_site_search" }, - "description": "Sample for PurgeDocuments", - "file": "discoveryengine_v1_generated_document_service_purge_documents_sync.py", + "description": "Sample for DisableAdvancedSiteSearch", + "file": "discoveryengine_v1_generated_site_search_engine_service_disable_advanced_site_search_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_DocumentService_PurgeDocuments_sync", + "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_DisableAdvancedSiteSearch_sync", "segments": [ { - "end": 56, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 55, "start": 27, "type": "SHORT" }, @@ -2091,44 +5963,44 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_document_service_purge_documents_sync.py" + "title": "discoveryengine_v1_generated_site_search_engine_service_disable_advanced_site_search_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", - "shortName": "DocumentServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" }, - "fullName": 
"google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.update_document", + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient.enable_advanced_site_search", "method": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService.UpdateDocument", + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.EnableAdvancedSiteSearch", "service": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "UpdateDocument" + "shortName": "EnableAdvancedSiteSearch" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.UpdateDocumentRequest" + "type": "google.cloud.discoveryengine_v1.types.EnableAdvancedSiteSearchRequest" }, { "name": "retry", @@ -2143,22 +6015,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.types.Document", - "shortName": "update_document" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "enable_advanced_site_search" }, - "description": "Sample for UpdateDocument", - "file": "discoveryengine_v1_generated_document_service_update_document_async.py", + "description": "Sample for EnableAdvancedSiteSearch", + "file": "discoveryengine_v1_generated_site_search_engine_service_enable_advanced_site_search_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_DocumentService_UpdateDocument_async", + "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_EnableAdvancedSiteSearch_async", "segments": [ { - "end": 50, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 55, "start": 27, "type": "SHORT" }, @@ -2168,43 +6040,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 
45, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_document_service_update_document_async.py" + "title": "discoveryengine_v1_generated_site_search_engine_service_enable_advanced_site_search_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient", - "shortName": "DocumentServiceClient" + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient.update_document", + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient.enable_advanced_site_search", "method": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService.UpdateDocument", + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.EnableAdvancedSiteSearch", "service": { - "fullName": "google.cloud.discoveryengine.v1.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "UpdateDocument" + "shortName": "EnableAdvancedSiteSearch" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.UpdateDocumentRequest" + "type": "google.cloud.discoveryengine_v1.types.EnableAdvancedSiteSearchRequest" }, { "name": "retry", @@ -2219,22 +6091,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.types.Document", - "shortName": "update_document" + "resultType": "google.api_core.operation.Operation", + "shortName": "enable_advanced_site_search" }, - "description": "Sample for UpdateDocument", - "file": "discoveryengine_v1_generated_document_service_update_document_sync.py", + "description": "Sample for EnableAdvancedSiteSearch", + "file": 
"discoveryengine_v1_generated_site_search_engine_service_enable_advanced_site_search_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_DocumentService_UpdateDocument_sync", + "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_EnableAdvancedSiteSearch_sync", "segments": [ { - "end": 50, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 55, "start": 27, "type": "SHORT" }, @@ -2244,56 +6116,44 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_document_service_update_document_sync.py" + "title": "discoveryengine_v1_generated_site_search_engine_service_enable_advanced_site_search_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.SchemaServiceAsyncClient", - "shortName": "SchemaServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.SchemaServiceAsyncClient.create_schema", + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient.fetch_domain_verification_status", "method": { - "fullName": "google.cloud.discoveryengine.v1.SchemaService.CreateSchema", + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.FetchDomainVerificationStatus", "service": { - "fullName": "google.cloud.discoveryengine.v1.SchemaService", - "shortName": "SchemaService" + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "CreateSchema" + "shortName": "FetchDomainVerificationStatus" }, 
"parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.CreateSchemaRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "schema", - "type": "google.cloud.discoveryengine_v1.types.Schema" - }, - { - "name": "schema_id", - "type": "str" + "type": "google.cloud.discoveryengine_v1.types.FetchDomainVerificationStatusRequest" }, { "name": "retry", @@ -2308,22 +6168,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_schema" + "resultType": "google.cloud.discoveryengine_v1.services.site_search_engine_service.pagers.FetchDomainVerificationStatusAsyncPager", + "shortName": "fetch_domain_verification_status" }, - "description": "Sample for CreateSchema", - "file": "discoveryengine_v1_generated_schema_service_create_schema_async.py", + "description": "Sample for FetchDomainVerificationStatus", + "file": "discoveryengine_v1_generated_site_search_engine_service_fetch_domain_verification_status_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_SchemaService_CreateSchema_async", + "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_FetchDomainVerificationStatus_async", "segments": [ { - "end": 56, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2333,55 +6193,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, - { - "end": 57, - "start": 54, + { + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_schema_service_create_schema_async.py" + "title": "discoveryengine_v1_generated_site_search_engine_service_fetch_domain_verification_status_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": 
"google.cloud.discoveryengine_v1.SchemaServiceClient", - "shortName": "SchemaServiceClient" + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.SchemaServiceClient.create_schema", + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient.fetch_domain_verification_status", "method": { - "fullName": "google.cloud.discoveryengine.v1.SchemaService.CreateSchema", + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.FetchDomainVerificationStatus", "service": { - "fullName": "google.cloud.discoveryengine.v1.SchemaService", - "shortName": "SchemaService" + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "CreateSchema" + "shortName": "FetchDomainVerificationStatus" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.CreateSchemaRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "schema", - "type": "google.cloud.discoveryengine_v1.types.Schema" - }, - { - "name": "schema_id", - "type": "str" + "type": "google.cloud.discoveryengine_v1.types.FetchDomainVerificationStatusRequest" }, { "name": "retry", @@ -2396,22 +6244,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_schema" + "resultType": "google.cloud.discoveryengine_v1.services.site_search_engine_service.pagers.FetchDomainVerificationStatusPager", + "shortName": "fetch_domain_verification_status" }, - "description": "Sample for CreateSchema", - "file": "discoveryengine_v1_generated_schema_service_create_schema_sync.py", + "description": "Sample for FetchDomainVerificationStatus", + "file": "discoveryengine_v1_generated_site_search_engine_service_fetch_domain_verification_status_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"discoveryengine_v1_generated_SchemaService_CreateSchema_sync", + "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_FetchDomainVerificationStatus_sync", "segments": [ { - "end": 56, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2421,44 +6269,44 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_schema_service_create_schema_sync.py" + "title": "discoveryengine_v1_generated_site_search_engine_service_fetch_domain_verification_status_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.SchemaServiceAsyncClient", - "shortName": "SchemaServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.SchemaServiceAsyncClient.delete_schema", + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient.get_site_search_engine", "method": { - "fullName": "google.cloud.discoveryengine.v1.SchemaService.DeleteSchema", + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.GetSiteSearchEngine", "service": { - "fullName": "google.cloud.discoveryengine.v1.SchemaService", - "shortName": "SchemaService" + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "DeleteSchema" + "shortName": "GetSiteSearchEngine" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.DeleteSchemaRequest" + "type": "google.cloud.discoveryengine_v1.types.GetSiteSearchEngineRequest" }, { "name": 
"name", @@ -2477,22 +6325,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_schema" + "resultType": "google.cloud.discoveryengine_v1.types.SiteSearchEngine", + "shortName": "get_site_search_engine" }, - "description": "Sample for DeleteSchema", - "file": "discoveryengine_v1_generated_schema_service_delete_schema_async.py", + "description": "Sample for GetSiteSearchEngine", + "file": "discoveryengine_v1_generated_site_search_engine_service_get_site_search_engine_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_SchemaService_DeleteSchema_async", + "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_GetSiteSearchEngine_async", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2507,38 +6355,38 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_schema_service_delete_schema_async.py" + "title": "discoveryengine_v1_generated_site_search_engine_service_get_site_search_engine_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.SchemaServiceClient", - "shortName": "SchemaServiceClient" + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.SchemaServiceClient.delete_schema", + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient.get_site_search_engine", "method": { - "fullName": "google.cloud.discoveryengine.v1.SchemaService.DeleteSchema", + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.GetSiteSearchEngine", "service": { - 
"fullName": "google.cloud.discoveryengine.v1.SchemaService", - "shortName": "SchemaService" + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "DeleteSchema" + "shortName": "GetSiteSearchEngine" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.DeleteSchemaRequest" + "type": "google.cloud.discoveryengine_v1.types.GetSiteSearchEngineRequest" }, { "name": "name", @@ -2557,22 +6405,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_schema" + "resultType": "google.cloud.discoveryengine_v1.types.SiteSearchEngine", + "shortName": "get_site_search_engine" }, - "description": "Sample for DeleteSchema", - "file": "discoveryengine_v1_generated_schema_service_delete_schema_sync.py", + "description": "Sample for GetSiteSearchEngine", + "file": "discoveryengine_v1_generated_site_search_engine_service_get_site_search_engine_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_SchemaService_DeleteSchema_sync", + "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_GetSiteSearchEngine_sync", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2587,39 +6435,39 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_schema_service_delete_schema_sync.py" + "title": "discoveryengine_v1_generated_site_search_engine_service_get_site_search_engine_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.SchemaServiceAsyncClient", - "shortName": "SchemaServiceAsyncClient" + "fullName": 
"google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.SchemaServiceAsyncClient.get_schema", + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient.get_target_site", "method": { - "fullName": "google.cloud.discoveryengine.v1.SchemaService.GetSchema", + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.GetTargetSite", "service": { - "fullName": "google.cloud.discoveryengine.v1.SchemaService", - "shortName": "SchemaService" + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "GetSchema" + "shortName": "GetTargetSite" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.GetSchemaRequest" + "type": "google.cloud.discoveryengine_v1.types.GetTargetSiteRequest" }, { "name": "name", @@ -2638,14 +6486,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.types.Schema", - "shortName": "get_schema" + "resultType": "google.cloud.discoveryengine_v1.types.TargetSite", + "shortName": "get_target_site" }, - "description": "Sample for GetSchema", - "file": "discoveryengine_v1_generated_schema_service_get_schema_async.py", + "description": "Sample for GetTargetSite", + "file": "discoveryengine_v1_generated_site_search_engine_service_get_target_site_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_SchemaService_GetSchema_async", + "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_GetTargetSite_async", "segments": [ { "end": 51, @@ -2678,28 +6526,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_schema_service_get_schema_async.py" + "title": "discoveryengine_v1_generated_site_search_engine_service_get_target_site_async.py" }, { "canonical": true, "clientMethod": { 
"client": { - "fullName": "google.cloud.discoveryengine_v1.SchemaServiceClient", - "shortName": "SchemaServiceClient" + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.SchemaServiceClient.get_schema", + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient.get_target_site", "method": { - "fullName": "google.cloud.discoveryengine.v1.SchemaService.GetSchema", + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.GetTargetSite", "service": { - "fullName": "google.cloud.discoveryengine.v1.SchemaService", - "shortName": "SchemaService" + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "GetSchema" + "shortName": "GetTargetSite" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.GetSchemaRequest" + "type": "google.cloud.discoveryengine_v1.types.GetTargetSiteRequest" }, { "name": "name", @@ -2718,14 +6566,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.types.Schema", - "shortName": "get_schema" + "resultType": "google.cloud.discoveryengine_v1.types.TargetSite", + "shortName": "get_target_site" }, - "description": "Sample for GetSchema", - "file": "discoveryengine_v1_generated_schema_service_get_schema_sync.py", + "description": "Sample for GetTargetSite", + "file": "discoveryengine_v1_generated_site_search_engine_service_get_target_site_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_SchemaService_GetSchema_sync", + "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_GetTargetSite_sync", "segments": [ { "end": 51, @@ -2758,29 +6606,29 @@ "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_schema_service_get_schema_sync.py" + "title": 
"discoveryengine_v1_generated_site_search_engine_service_get_target_site_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.SchemaServiceAsyncClient", - "shortName": "SchemaServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.SchemaServiceAsyncClient.list_schemas", + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient.list_target_sites", "method": { - "fullName": "google.cloud.discoveryengine.v1.SchemaService.ListSchemas", + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.ListTargetSites", "service": { - "fullName": "google.cloud.discoveryengine.v1.SchemaService", - "shortName": "SchemaService" + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "ListSchemas" + "shortName": "ListTargetSites" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.ListSchemasRequest" + "type": "google.cloud.discoveryengine_v1.types.ListTargetSitesRequest" }, { "name": "parent", @@ -2799,14 +6647,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.services.schema_service.pagers.ListSchemasAsyncPager", - "shortName": "list_schemas" + "resultType": "google.cloud.discoveryengine_v1.services.site_search_engine_service.pagers.ListTargetSitesAsyncPager", + "shortName": "list_target_sites" }, - "description": "Sample for ListSchemas", - "file": "discoveryengine_v1_generated_schema_service_list_schemas_async.py", + "description": "Sample for ListTargetSites", + "file": "discoveryengine_v1_generated_site_search_engine_service_list_target_sites_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"discoveryengine_v1_generated_SchemaService_ListSchemas_async", + "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_ListTargetSites_async", "segments": [ { "end": 52, @@ -2839,28 +6687,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_schema_service_list_schemas_async.py" + "title": "discoveryengine_v1_generated_site_search_engine_service_list_target_sites_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.SchemaServiceClient", - "shortName": "SchemaServiceClient" + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.SchemaServiceClient.list_schemas", + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient.list_target_sites", "method": { - "fullName": "google.cloud.discoveryengine.v1.SchemaService.ListSchemas", + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.ListTargetSites", "service": { - "fullName": "google.cloud.discoveryengine.v1.SchemaService", - "shortName": "SchemaService" + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "ListSchemas" + "shortName": "ListTargetSites" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.ListSchemasRequest" + "type": "google.cloud.discoveryengine_v1.types.ListTargetSitesRequest" }, { "name": "parent", @@ -2879,14 +6727,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.services.schema_service.pagers.ListSchemasPager", - "shortName": "list_schemas" + "resultType": "google.cloud.discoveryengine_v1.services.site_search_engine_service.pagers.ListTargetSitesPager", + "shortName": "list_target_sites" }, - "description": "Sample for ListSchemas", - "file": 
"discoveryengine_v1_generated_schema_service_list_schemas_sync.py", + "description": "Sample for ListTargetSites", + "file": "discoveryengine_v1_generated_site_search_engine_service_list_target_sites_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_SchemaService_ListSchemas_sync", + "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_ListTargetSites_sync", "segments": [ { "end": 52, @@ -2919,29 +6767,29 @@ "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_schema_service_list_schemas_sync.py" + "title": "discoveryengine_v1_generated_site_search_engine_service_list_target_sites_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.SchemaServiceAsyncClient", - "shortName": "SchemaServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.SchemaServiceAsyncClient.update_schema", + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient.recrawl_uris", "method": { - "fullName": "google.cloud.discoveryengine.v1.SchemaService.UpdateSchema", + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.RecrawlUris", "service": { - "fullName": "google.cloud.discoveryengine.v1.SchemaService", - "shortName": "SchemaService" + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "UpdateSchema" + "shortName": "RecrawlUris" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.UpdateSchemaRequest" + "type": "google.cloud.discoveryengine_v1.types.RecrawlUrisRequest" }, { "name": "retry", @@ -2957,21 +6805,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_schema" + "shortName": 
"recrawl_uris" }, - "description": "Sample for UpdateSchema", - "file": "discoveryengine_v1_generated_schema_service_update_schema_async.py", + "description": "Sample for RecrawlUris", + "file": "discoveryengine_v1_generated_site_search_engine_service_recrawl_uris_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_SchemaService_UpdateSchema_async", + "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_RecrawlUris_async", "segments": [ { - "end": 54, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 56, "start": 27, "type": "SHORT" }, @@ -2981,43 +6829,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 45, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_schema_service_update_schema_async.py" + "title": "discoveryengine_v1_generated_site_search_engine_service_recrawl_uris_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.SchemaServiceClient", - "shortName": "SchemaServiceClient" + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.SchemaServiceClient.update_schema", + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient.recrawl_uris", "method": { - "fullName": "google.cloud.discoveryengine.v1.SchemaService.UpdateSchema", + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.RecrawlUris", "service": { - "fullName": "google.cloud.discoveryengine.v1.SchemaService", - "shortName": "SchemaService" + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": 
"UpdateSchema" + "shortName": "RecrawlUris" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.UpdateSchemaRequest" + "type": "google.cloud.discoveryengine_v1.types.RecrawlUrisRequest" }, { "name": "retry", @@ -3033,21 +6881,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "update_schema" + "shortName": "recrawl_uris" }, - "description": "Sample for UpdateSchema", - "file": "discoveryengine_v1_generated_schema_service_update_schema_sync.py", + "description": "Sample for RecrawlUris", + "file": "discoveryengine_v1_generated_site_search_engine_service_recrawl_uris_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_SchemaService_UpdateSchema_sync", + "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_RecrawlUris_sync", "segments": [ { - "end": 54, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 56, "start": 27, "type": "SHORT" }, @@ -3057,44 +6905,48 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 45, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_schema_service_update_schema_sync.py" + "title": "discoveryengine_v1_generated_site_search_engine_service_recrawl_uris_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.SearchServiceAsyncClient", - "shortName": "SearchServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.SearchServiceAsyncClient.search", + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient.update_target_site", "method": 
{ - "fullName": "google.cloud.discoveryengine.v1.SearchService.Search", + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.UpdateTargetSite", "service": { - "fullName": "google.cloud.discoveryengine.v1.SearchService", - "shortName": "SearchService" + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "Search" + "shortName": "UpdateTargetSite" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.SearchRequest" + "type": "google.cloud.discoveryengine_v1.types.UpdateTargetSiteRequest" + }, + { + "name": "target_site", + "type": "google.cloud.discoveryengine_v1.types.TargetSite" }, { "name": "retry", @@ -3109,22 +6961,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.services.search_service.pagers.SearchAsyncPager", - "shortName": "search" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_target_site" }, - "description": "Sample for Search", - "file": "discoveryengine_v1_generated_search_service_search_async.py", + "description": "Sample for UpdateTargetSite", + "file": "discoveryengine_v1_generated_site_search_engine_service_update_target_site_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_SearchService_Search_async", + "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_UpdateTargetSite_async", "segments": [ { - "end": 52, + "end": 58, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 58, "start": 27, "type": "SHORT" }, @@ -3134,43 +6986,47 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 55, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 59, + "start": 56, "type": "RESPONSE_HANDLING" } ], - "title": 
"discoveryengine_v1_generated_search_service_search_async.py" + "title": "discoveryengine_v1_generated_site_search_engine_service_update_target_site_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.SearchServiceClient", - "shortName": "SearchServiceClient" + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.SearchServiceClient.search", + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient.update_target_site", "method": { - "fullName": "google.cloud.discoveryengine.v1.SearchService.Search", + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.UpdateTargetSite", "service": { - "fullName": "google.cloud.discoveryengine.v1.SearchService", - "shortName": "SearchService" + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "Search" + "shortName": "UpdateTargetSite" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.SearchRequest" + "type": "google.cloud.discoveryengine_v1.types.UpdateTargetSiteRequest" + }, + { + "name": "target_site", + "type": "google.cloud.discoveryengine_v1.types.TargetSite" }, { "name": "retry", @@ -3185,22 +7041,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1.services.search_service.pagers.SearchPager", - "shortName": "search" + "resultType": "google.api_core.operation.Operation", + "shortName": "update_target_site" }, - "description": "Sample for Search", - "file": "discoveryengine_v1_generated_search_service_search_sync.py", + "description": "Sample for UpdateTargetSite", + "file": "discoveryengine_v1_generated_site_search_engine_service_update_target_site_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"discoveryengine_v1_generated_SearchService_Search_sync", + "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_UpdateTargetSite_sync", "segments": [ { - "end": 52, + "end": 58, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 58, "start": 27, "type": "SHORT" }, @@ -3210,22 +7066,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 55, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 59, + "start": 56, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_search_service_search_sync.py" + "title": "discoveryengine_v1_generated_site_search_engine_service_update_target_site_sync.py" }, { "canonical": true, diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1alpha.json b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1alpha.json index 0ed4a903c29d..77b380b21703 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1alpha.json +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-discoveryengine", - "version": "0.11.7" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json index 49d6b388993d..94441eb242b9 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json +++ 
b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-discoveryengine", - "version": "0.11.7" + "version": "0.1.0" }, "snippets": [ { @@ -169,30 +169,22 @@ "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceAsyncClient", - "shortName": "ConversationalSearchServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1beta.CompletionServiceAsyncClient", + "shortName": "CompletionServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceAsyncClient.converse_conversation", + "fullName": "google.cloud.discoveryengine_v1beta.CompletionServiceAsyncClient.import_suggestion_deny_list_entries", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.ConversationalSearchService.ConverseConversation", + "fullName": "google.cloud.discoveryengine.v1beta.CompletionService.ImportSuggestionDenyListEntries", "service": { - "fullName": "google.cloud.discoveryengine.v1beta.ConversationalSearchService", - "shortName": "ConversationalSearchService" + "fullName": "google.cloud.discoveryengine.v1beta.CompletionService", + "shortName": "CompletionService" }, - "shortName": "ConverseConversation" + "shortName": "ImportSuggestionDenyListEntries" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.ConverseConversationRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "query", - "type": "google.cloud.discoveryengine_v1beta.types.TextInput" + "type": "google.cloud.discoveryengine_v1beta.types.ImportSuggestionDenyListEntriesRequest" }, { "name": "retry", @@ -207,22 +199,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1beta.types.ConverseConversationResponse", - "shortName": "converse_conversation" + "resultType": 
"google.api_core.operation_async.AsyncOperation", + "shortName": "import_suggestion_deny_list_entries" }, - "description": "Sample for ConverseConversation", - "file": "discoveryengine_v1beta_generated_conversational_search_service_converse_conversation_async.py", + "description": "Sample for ImportSuggestionDenyListEntries", + "file": "discoveryengine_v1beta_generated_completion_service_import_suggestion_deny_list_entries_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_ConversationalSearchService_ConverseConversation_async", + "regionTag": "discoveryengine_v1beta_generated_CompletionService_ImportSuggestionDenyListEntries_async", "segments": [ { - "end": 51, + "end": 60, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 60, "start": 27, "type": "SHORT" }, @@ -232,51 +224,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 57, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 61, + "start": 58, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_conversational_search_service_converse_conversation_async.py" + "title": "discoveryengine_v1beta_generated_completion_service_import_suggestion_deny_list_entries_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceClient", - "shortName": "ConversationalSearchServiceClient" + "fullName": "google.cloud.discoveryengine_v1beta.CompletionServiceClient", + "shortName": "CompletionServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceClient.converse_conversation", + "fullName": "google.cloud.discoveryengine_v1beta.CompletionServiceClient.import_suggestion_deny_list_entries", "method": { - "fullName": 
"google.cloud.discoveryengine.v1beta.ConversationalSearchService.ConverseConversation", + "fullName": "google.cloud.discoveryengine.v1beta.CompletionService.ImportSuggestionDenyListEntries", "service": { - "fullName": "google.cloud.discoveryengine.v1beta.ConversationalSearchService", - "shortName": "ConversationalSearchService" + "fullName": "google.cloud.discoveryengine.v1beta.CompletionService", + "shortName": "CompletionService" }, - "shortName": "ConverseConversation" + "shortName": "ImportSuggestionDenyListEntries" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.ConverseConversationRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "query", - "type": "google.cloud.discoveryengine_v1beta.types.TextInput" + "type": "google.cloud.discoveryengine_v1beta.types.ImportSuggestionDenyListEntriesRequest" }, { "name": "retry", @@ -291,22 +275,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1beta.types.ConverseConversationResponse", - "shortName": "converse_conversation" + "resultType": "google.api_core.operation.Operation", + "shortName": "import_suggestion_deny_list_entries" }, - "description": "Sample for ConverseConversation", - "file": "discoveryengine_v1beta_generated_conversational_search_service_converse_conversation_sync.py", + "description": "Sample for ImportSuggestionDenyListEntries", + "file": "discoveryengine_v1beta_generated_completion_service_import_suggestion_deny_list_entries_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_ConversationalSearchService_ConverseConversation_sync", + "regionTag": "discoveryengine_v1beta_generated_CompletionService_ImportSuggestionDenyListEntries_sync", "segments": [ { - "end": 51, + "end": 60, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 60, "start": 27, "type": "SHORT" }, @@ -316,52 +300,44 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + 
"end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 57, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 61, + "start": 58, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_conversational_search_service_converse_conversation_sync.py" + "title": "discoveryengine_v1beta_generated_completion_service_import_suggestion_deny_list_entries_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceAsyncClient", - "shortName": "ConversationalSearchServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1beta.CompletionServiceAsyncClient", + "shortName": "CompletionServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceAsyncClient.create_conversation", + "fullName": "google.cloud.discoveryengine_v1beta.CompletionServiceAsyncClient.purge_suggestion_deny_list_entries", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.ConversationalSearchService.CreateConversation", + "fullName": "google.cloud.discoveryengine.v1beta.CompletionService.PurgeSuggestionDenyListEntries", "service": { - "fullName": "google.cloud.discoveryengine.v1beta.ConversationalSearchService", - "shortName": "ConversationalSearchService" + "fullName": "google.cloud.discoveryengine.v1beta.CompletionService", + "shortName": "CompletionService" }, - "shortName": "CreateConversation" + "shortName": "PurgeSuggestionDenyListEntries" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.CreateConversationRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "conversation", - "type": "google.cloud.discoveryengine_v1beta.types.Conversation" + "type": "google.cloud.discoveryengine_v1beta.types.PurgeSuggestionDenyListEntriesRequest" }, { "name": "retry", @@ -376,22 +352,22 @@ "type": 
"Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1beta.types.Conversation", - "shortName": "create_conversation" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "purge_suggestion_deny_list_entries" }, - "description": "Sample for CreateConversation", - "file": "discoveryengine_v1beta_generated_conversational_search_service_create_conversation_async.py", + "description": "Sample for PurgeSuggestionDenyListEntries", + "file": "discoveryengine_v1beta_generated_completion_service_purge_suggestion_deny_list_entries_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_ConversationalSearchService_CreateConversation_async", + "regionTag": "discoveryengine_v1beta_generated_CompletionService_PurgeSuggestionDenyListEntries_async", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -406,46 +382,38 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_conversational_search_service_create_conversation_async.py" + "title": "discoveryengine_v1beta_generated_completion_service_purge_suggestion_deny_list_entries_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceClient", - "shortName": "ConversationalSearchServiceClient" + "fullName": "google.cloud.discoveryengine_v1beta.CompletionServiceClient", + "shortName": "CompletionServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceClient.create_conversation", + "fullName": "google.cloud.discoveryengine_v1beta.CompletionServiceClient.purge_suggestion_deny_list_entries", "method": { - "fullName": 
"google.cloud.discoveryengine.v1beta.ConversationalSearchService.CreateConversation", + "fullName": "google.cloud.discoveryengine.v1beta.CompletionService.PurgeSuggestionDenyListEntries", "service": { - "fullName": "google.cloud.discoveryengine.v1beta.ConversationalSearchService", - "shortName": "ConversationalSearchService" + "fullName": "google.cloud.discoveryengine.v1beta.CompletionService", + "shortName": "CompletionService" }, - "shortName": "CreateConversation" + "shortName": "PurgeSuggestionDenyListEntries" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.CreateConversationRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "conversation", - "type": "google.cloud.discoveryengine_v1beta.types.Conversation" + "type": "google.cloud.discoveryengine_v1beta.types.PurgeSuggestionDenyListEntriesRequest" }, { "name": "retry", @@ -460,22 +428,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1beta.types.Conversation", - "shortName": "create_conversation" + "resultType": "google.api_core.operation.Operation", + "shortName": "purge_suggestion_deny_list_entries" }, - "description": "Sample for CreateConversation", - "file": "discoveryengine_v1beta_generated_conversational_search_service_create_conversation_sync.py", + "description": "Sample for PurgeSuggestionDenyListEntries", + "file": "discoveryengine_v1beta_generated_completion_service_purge_suggestion_deny_list_entries_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_ConversationalSearchService_CreateConversation_sync", + "regionTag": "discoveryengine_v1beta_generated_CompletionService_PurgeSuggestionDenyListEntries_sync", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -490,17 +458,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, 
"type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_conversational_search_service_create_conversation_sync.py" + "title": "discoveryengine_v1beta_generated_completion_service_purge_suggestion_deny_list_entries_sync.py" }, { "canonical": true, @@ -510,24 +478,28 @@ "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceAsyncClient", "shortName": "ConversationalSearchServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceAsyncClient.delete_conversation", + "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceAsyncClient.converse_conversation", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.ConversationalSearchService.DeleteConversation", + "fullName": "google.cloud.discoveryengine.v1beta.ConversationalSearchService.ConverseConversation", "service": { "fullName": "google.cloud.discoveryengine.v1beta.ConversationalSearchService", "shortName": "ConversationalSearchService" }, - "shortName": "DeleteConversation" + "shortName": "ConverseConversation" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.DeleteConversationRequest" + "type": "google.cloud.discoveryengine_v1beta.types.ConverseConversationRequest" }, { "name": "name", "type": "str" }, + { + "name": "query", + "type": "google.cloud.discoveryengine_v1beta.types.TextInput" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -541,21 +513,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_conversation" + "resultType": "google.cloud.discoveryengine_v1beta.types.ConverseConversationResponse", + "shortName": "converse_conversation" }, - "description": "Sample for DeleteConversation", - "file": "discoveryengine_v1beta_generated_conversational_search_service_delete_conversation_async.py", + "description": "Sample for 
ConverseConversation", + "file": "discoveryengine_v1beta_generated_conversational_search_service_converse_conversation_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_ConversationalSearchService_DeleteConversation_async", + "regionTag": "discoveryengine_v1beta_generated_ConversationalSearchService_ConverseConversation_async", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -570,15 +543,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_conversational_search_service_delete_conversation_async.py" + "title": "discoveryengine_v1beta_generated_conversational_search_service_converse_conversation_async.py" }, { "canonical": true, @@ -587,24 +562,28 @@ "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceClient", "shortName": "ConversationalSearchServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceClient.delete_conversation", + "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceClient.converse_conversation", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.ConversationalSearchService.DeleteConversation", + "fullName": "google.cloud.discoveryengine.v1beta.ConversationalSearchService.ConverseConversation", "service": { "fullName": "google.cloud.discoveryengine.v1beta.ConversationalSearchService", "shortName": "ConversationalSearchService" }, - "shortName": "DeleteConversation" + "shortName": "ConverseConversation" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.DeleteConversationRequest" + "type": "google.cloud.discoveryengine_v1beta.types.ConverseConversationRequest" }, { "name": "name", "type": "str" }, + { + 
"name": "query", + "type": "google.cloud.discoveryengine_v1beta.types.TextInput" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -618,21 +597,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_conversation" + "resultType": "google.cloud.discoveryengine_v1beta.types.ConverseConversationResponse", + "shortName": "converse_conversation" }, - "description": "Sample for DeleteConversation", - "file": "discoveryengine_v1beta_generated_conversational_search_service_delete_conversation_sync.py", + "description": "Sample for ConverseConversation", + "file": "discoveryengine_v1beta_generated_conversational_search_service_converse_conversation_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_ConversationalSearchService_DeleteConversation_sync", + "regionTag": "discoveryengine_v1beta_generated_ConversationalSearchService_ConverseConversation_sync", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -647,15 +627,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_conversational_search_service_delete_conversation_sync.py" + "title": "discoveryengine_v1beta_generated_conversational_search_service_converse_conversation_sync.py" }, { "canonical": true, @@ -665,24 +647,28 @@ "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceAsyncClient", "shortName": "ConversationalSearchServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceAsyncClient.get_conversation", + "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceAsyncClient.create_conversation", "method": { - "fullName": 
"google.cloud.discoveryengine.v1beta.ConversationalSearchService.GetConversation", + "fullName": "google.cloud.discoveryengine.v1beta.ConversationalSearchService.CreateConversation", "service": { "fullName": "google.cloud.discoveryengine.v1beta.ConversationalSearchService", "shortName": "ConversationalSearchService" }, - "shortName": "GetConversation" + "shortName": "CreateConversation" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.GetConversationRequest" + "type": "google.cloud.discoveryengine_v1beta.types.CreateConversationRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, + { + "name": "conversation", + "type": "google.cloud.discoveryengine_v1beta.types.Conversation" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -697,13 +683,13 @@ } ], "resultType": "google.cloud.discoveryengine_v1beta.types.Conversation", - "shortName": "get_conversation" + "shortName": "create_conversation" }, - "description": "Sample for GetConversation", - "file": "discoveryengine_v1beta_generated_conversational_search_service_get_conversation_async.py", + "description": "Sample for CreateConversation", + "file": "discoveryengine_v1beta_generated_conversational_search_service_create_conversation_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_ConversationalSearchService_GetConversation_async", + "regionTag": "discoveryengine_v1beta_generated_ConversationalSearchService_CreateConversation_async", "segments": [ { "end": 51, @@ -736,7 +722,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_conversational_search_service_get_conversation_async.py" + "title": "discoveryengine_v1beta_generated_conversational_search_service_create_conversation_async.py" }, { "canonical": true, @@ -745,24 +731,28 @@ "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceClient", "shortName": 
"ConversationalSearchServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceClient.get_conversation", + "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceClient.create_conversation", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.ConversationalSearchService.GetConversation", + "fullName": "google.cloud.discoveryengine.v1beta.ConversationalSearchService.CreateConversation", "service": { "fullName": "google.cloud.discoveryengine.v1beta.ConversationalSearchService", "shortName": "ConversationalSearchService" }, - "shortName": "GetConversation" + "shortName": "CreateConversation" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.GetConversationRequest" + "type": "google.cloud.discoveryengine_v1beta.types.CreateConversationRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, + { + "name": "conversation", + "type": "google.cloud.discoveryengine_v1beta.types.Conversation" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -777,13 +767,13 @@ } ], "resultType": "google.cloud.discoveryengine_v1beta.types.Conversation", - "shortName": "get_conversation" + "shortName": "create_conversation" }, - "description": "Sample for GetConversation", - "file": "discoveryengine_v1beta_generated_conversational_search_service_get_conversation_sync.py", + "description": "Sample for CreateConversation", + "file": "discoveryengine_v1beta_generated_conversational_search_service_create_conversation_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_ConversationalSearchService_GetConversation_sync", + "regionTag": "discoveryengine_v1beta_generated_ConversationalSearchService_CreateConversation_sync", "segments": [ { "end": 51, @@ -816,7 +806,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_conversational_search_service_get_conversation_sync.py" + 
"title": "discoveryengine_v1beta_generated_conversational_search_service_create_conversation_sync.py" }, { "canonical": true, @@ -826,22 +816,22 @@ "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceAsyncClient", "shortName": "ConversationalSearchServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceAsyncClient.list_conversations", + "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceAsyncClient.delete_conversation", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.ConversationalSearchService.ListConversations", + "fullName": "google.cloud.discoveryengine.v1beta.ConversationalSearchService.DeleteConversation", "service": { "fullName": "google.cloud.discoveryengine.v1beta.ConversationalSearchService", "shortName": "ConversationalSearchService" }, - "shortName": "ListConversations" + "shortName": "DeleteConversation" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.ListConversationsRequest" + "type": "google.cloud.discoveryengine_v1beta.types.DeleteConversationRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -857,22 +847,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1beta.services.conversational_search_service.pagers.ListConversationsAsyncPager", - "shortName": "list_conversations" + "shortName": "delete_conversation" }, - "description": "Sample for ListConversations", - "file": "discoveryengine_v1beta_generated_conversational_search_service_list_conversations_async.py", + "description": "Sample for DeleteConversation", + "file": "discoveryengine_v1beta_generated_conversational_search_service_delete_conversation_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_ConversationalSearchService_ListConversations_async", + "regionTag": 
"discoveryengine_v1beta_generated_ConversationalSearchService_DeleteConversation_async", "segments": [ { - "end": 52, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 49, "start": 27, "type": "SHORT" }, @@ -887,17 +876,15 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_conversational_search_service_list_conversations_async.py" + "title": "discoveryengine_v1beta_generated_conversational_search_service_delete_conversation_async.py" }, { "canonical": true, @@ -906,22 +893,22 @@ "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceClient", "shortName": "ConversationalSearchServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceClient.list_conversations", + "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceClient.delete_conversation", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.ConversationalSearchService.ListConversations", + "fullName": "google.cloud.discoveryengine.v1beta.ConversationalSearchService.DeleteConversation", "service": { "fullName": "google.cloud.discoveryengine.v1beta.ConversationalSearchService", "shortName": "ConversationalSearchService" }, - "shortName": "ListConversations" + "shortName": "DeleteConversation" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.ListConversationsRequest" + "type": "google.cloud.discoveryengine_v1beta.types.DeleteConversationRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -937,22 +924,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1beta.services.conversational_search_service.pagers.ListConversationsPager", - "shortName": "list_conversations" + "shortName": "delete_conversation" }, - "description": "Sample for 
ListConversations", - "file": "discoveryengine_v1beta_generated_conversational_search_service_list_conversations_sync.py", + "description": "Sample for DeleteConversation", + "file": "discoveryengine_v1beta_generated_conversational_search_service_delete_conversation_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_ConversationalSearchService_ListConversations_sync", + "regionTag": "discoveryengine_v1beta_generated_ConversationalSearchService_DeleteConversation_sync", "segments": [ { - "end": 52, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 49, "start": 27, "type": "SHORT" }, @@ -967,17 +953,15 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_conversational_search_service_list_conversations_sync.py" + "title": "discoveryengine_v1beta_generated_conversational_search_service_delete_conversation_sync.py" }, { "canonical": true, @@ -987,27 +971,23 @@ "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceAsyncClient", "shortName": "ConversationalSearchServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceAsyncClient.update_conversation", + "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceAsyncClient.get_conversation", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.ConversationalSearchService.UpdateConversation", + "fullName": "google.cloud.discoveryengine.v1beta.ConversationalSearchService.GetConversation", "service": { "fullName": "google.cloud.discoveryengine.v1beta.ConversationalSearchService", "shortName": "ConversationalSearchService" }, - "shortName": "UpdateConversation" + "shortName": "GetConversation" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.discoveryengine_v1beta.types.UpdateConversationRequest" - }, - { - "name": "conversation", - "type": "google.cloud.discoveryengine_v1beta.types.Conversation" + "type": "google.cloud.discoveryengine_v1beta.types.GetConversationRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -1023,21 +1003,21 @@ } ], "resultType": "google.cloud.discoveryengine_v1beta.types.Conversation", - "shortName": "update_conversation" + "shortName": "get_conversation" }, - "description": "Sample for UpdateConversation", - "file": "discoveryengine_v1beta_generated_conversational_search_service_update_conversation_async.py", + "description": "Sample for GetConversation", + "file": "discoveryengine_v1beta_generated_conversational_search_service_get_conversation_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_ConversationalSearchService_UpdateConversation_async", + "regionTag": "discoveryengine_v1beta_generated_ConversationalSearchService_GetConversation_async", "segments": [ { - "end": 50, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1047,22 +1027,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_conversational_search_service_update_conversation_async.py" + "title": "discoveryengine_v1beta_generated_conversational_search_service_get_conversation_async.py" }, { "canonical": true, @@ -1071,27 +1051,4404 @@ "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceClient", "shortName": "ConversationalSearchServiceClient" }, - "fullName": 
"google.cloud.discoveryengine_v1beta.ConversationalSearchServiceClient.update_conversation", + "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceClient.get_conversation", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.ConversationalSearchService.UpdateConversation", + "fullName": "google.cloud.discoveryengine.v1beta.ConversationalSearchService.GetConversation", "service": { "fullName": "google.cloud.discoveryengine.v1beta.ConversationalSearchService", "shortName": "ConversationalSearchService" }, - "shortName": "UpdateConversation" + "shortName": "GetConversation" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.UpdateConversationRequest" + "type": "google.cloud.discoveryengine_v1beta.types.GetConversationRequest" }, { - "name": "conversation", - "type": "google.cloud.discoveryengine_v1beta.types.Conversation" + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.Conversation", + "shortName": "get_conversation" + }, + "description": "Sample for GetConversation", + "file": "discoveryengine_v1beta_generated_conversational_search_service_get_conversation_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_ConversationalSearchService_GetConversation_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"discoveryengine_v1beta_generated_conversational_search_service_get_conversation_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceAsyncClient", + "shortName": "ConversationalSearchServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceAsyncClient.list_conversations", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.ConversationalSearchService.ListConversations", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.ConversationalSearchService", + "shortName": "ConversationalSearchService" + }, + "shortName": "ListConversations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.ListConversationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.services.conversational_search_service.pagers.ListConversationsAsyncPager", + "shortName": "list_conversations" + }, + "description": "Sample for ListConversations", + "file": "discoveryengine_v1beta_generated_conversational_search_service_list_conversations_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_ConversationalSearchService_ListConversations_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"discoveryengine_v1beta_generated_conversational_search_service_list_conversations_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceClient", + "shortName": "ConversationalSearchServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceClient.list_conversations", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.ConversationalSearchService.ListConversations", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.ConversationalSearchService", + "shortName": "ConversationalSearchService" + }, + "shortName": "ListConversations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.ListConversationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.services.conversational_search_service.pagers.ListConversationsPager", + "shortName": "list_conversations" + }, + "description": "Sample for ListConversations", + "file": "discoveryengine_v1beta_generated_conversational_search_service_list_conversations_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_ConversationalSearchService_ListConversations_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"discoveryengine_v1beta_generated_conversational_search_service_list_conversations_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceAsyncClient", + "shortName": "ConversationalSearchServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceAsyncClient.update_conversation", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.ConversationalSearchService.UpdateConversation", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.ConversationalSearchService", + "shortName": "ConversationalSearchService" + }, + "shortName": "UpdateConversation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.UpdateConversationRequest" + }, + { + "name": "conversation", + "type": "google.cloud.discoveryengine_v1beta.types.Conversation" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.Conversation", + "shortName": "update_conversation" + }, + "description": "Sample for UpdateConversation", + "file": "discoveryengine_v1beta_generated_conversational_search_service_update_conversation_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_ConversationalSearchService_UpdateConversation_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_conversational_search_service_update_conversation_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceClient", + "shortName": "ConversationalSearchServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.ConversationalSearchServiceClient.update_conversation", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.ConversationalSearchService.UpdateConversation", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.ConversationalSearchService", + "shortName": "ConversationalSearchService" + }, + "shortName": "UpdateConversation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.UpdateConversationRequest" + }, + { + "name": "conversation", + "type": "google.cloud.discoveryengine_v1beta.types.Conversation" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.Conversation", + "shortName": "update_conversation" + }, + "description": "Sample for UpdateConversation", + "file": "discoveryengine_v1beta_generated_conversational_search_service_update_conversation_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_ConversationalSearchService_UpdateConversation_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_conversational_search_service_update_conversation_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.DataStoreServiceAsyncClient", + "shortName": "DataStoreServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.DataStoreServiceAsyncClient.create_data_store", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.DataStoreService.CreateDataStore", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.DataStoreService", + "shortName": "DataStoreService" + }, + "shortName": "CreateDataStore" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.CreateDataStoreRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "data_store", + "type": "google.cloud.discoveryengine_v1beta.types.DataStore" + }, + { + "name": "data_store_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_data_store" + }, + "description": "Sample for CreateDataStore", + "file": "discoveryengine_v1beta_generated_data_store_service_create_data_store_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_DataStoreService_CreateDataStore_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_data_store_service_create_data_store_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.DataStoreServiceClient", + "shortName": "DataStoreServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.DataStoreServiceClient.create_data_store", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.DataStoreService.CreateDataStore", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.DataStoreService", + "shortName": "DataStoreService" + }, + "shortName": "CreateDataStore" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.CreateDataStoreRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "data_store", + "type": "google.cloud.discoveryengine_v1beta.types.DataStore" + }, + { + "name": "data_store_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_data_store" + }, + "description": "Sample for CreateDataStore", + "file": "discoveryengine_v1beta_generated_data_store_service_create_data_store_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_DataStoreService_CreateDataStore_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_data_store_service_create_data_store_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.DataStoreServiceAsyncClient", + "shortName": "DataStoreServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.DataStoreServiceAsyncClient.delete_data_store", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.DataStoreService.DeleteDataStore", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.DataStoreService", + "shortName": "DataStoreService" + }, + "shortName": "DeleteDataStore" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.DeleteDataStoreRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_data_store" + }, + "description": "Sample for DeleteDataStore", + "file": "discoveryengine_v1beta_generated_data_store_service_delete_data_store_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_DataStoreService_DeleteDataStore_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"discoveryengine_v1beta_generated_data_store_service_delete_data_store_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.DataStoreServiceClient", + "shortName": "DataStoreServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.DataStoreServiceClient.delete_data_store", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.DataStoreService.DeleteDataStore", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.DataStoreService", + "shortName": "DataStoreService" + }, + "shortName": "DeleteDataStore" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.DeleteDataStoreRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_data_store" + }, + "description": "Sample for DeleteDataStore", + "file": "discoveryengine_v1beta_generated_data_store_service_delete_data_store_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_DataStoreService_DeleteDataStore_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_data_store_service_delete_data_store_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.discoveryengine_v1beta.DataStoreServiceAsyncClient", + "shortName": "DataStoreServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.DataStoreServiceAsyncClient.get_data_store", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.DataStoreService.GetDataStore", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.DataStoreService", + "shortName": "DataStoreService" + }, + "shortName": "GetDataStore" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.GetDataStoreRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.DataStore", + "shortName": "get_data_store" + }, + "description": "Sample for GetDataStore", + "file": "discoveryengine_v1beta_generated_data_store_service_get_data_store_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_DataStoreService_GetDataStore_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_data_store_service_get_data_store_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.DataStoreServiceClient", + "shortName": "DataStoreServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.DataStoreServiceClient.get_data_store", + "method": { + 
"fullName": "google.cloud.discoveryengine.v1beta.DataStoreService.GetDataStore", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.DataStoreService", + "shortName": "DataStoreService" + }, + "shortName": "GetDataStore" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.GetDataStoreRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.DataStore", + "shortName": "get_data_store" + }, + "description": "Sample for GetDataStore", + "file": "discoveryengine_v1beta_generated_data_store_service_get_data_store_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_DataStoreService_GetDataStore_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_data_store_service_get_data_store_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.DataStoreServiceAsyncClient", + "shortName": "DataStoreServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.DataStoreServiceAsyncClient.list_data_stores", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.DataStoreService.ListDataStores", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.DataStoreService", + "shortName": "DataStoreService" + }, 
+ "shortName": "ListDataStores" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.ListDataStoresRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.services.data_store_service.pagers.ListDataStoresAsyncPager", + "shortName": "list_data_stores" + }, + "description": "Sample for ListDataStores", + "file": "discoveryengine_v1beta_generated_data_store_service_list_data_stores_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_DataStoreService_ListDataStores_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_data_store_service_list_data_stores_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.DataStoreServiceClient", + "shortName": "DataStoreServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.DataStoreServiceClient.list_data_stores", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.DataStoreService.ListDataStores", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.DataStoreService", + "shortName": "DataStoreService" + }, + "shortName": "ListDataStores" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.ListDataStoresRequest" + }, + { + 
"name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.services.data_store_service.pagers.ListDataStoresPager", + "shortName": "list_data_stores" + }, + "description": "Sample for ListDataStores", + "file": "discoveryengine_v1beta_generated_data_store_service_list_data_stores_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_DataStoreService_ListDataStores_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_data_store_service_list_data_stores_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.DataStoreServiceAsyncClient", + "shortName": "DataStoreServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.DataStoreServiceAsyncClient.update_data_store", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.DataStoreService.UpdateDataStore", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.DataStoreService", + "shortName": "DataStoreService" + }, + "shortName": "UpdateDataStore" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.UpdateDataStoreRequest" + }, + { + "name": "data_store", + "type": "google.cloud.discoveryengine_v1beta.types.DataStore" + }, + { + "name": "update_mask", + "type": 
"google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.DataStore", + "shortName": "update_data_store" + }, + "description": "Sample for UpdateDataStore", + "file": "discoveryengine_v1beta_generated_data_store_service_update_data_store_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_DataStoreService_UpdateDataStore_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_data_store_service_update_data_store_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.DataStoreServiceClient", + "shortName": "DataStoreServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.DataStoreServiceClient.update_data_store", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.DataStoreService.UpdateDataStore", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.DataStoreService", + "shortName": "DataStoreService" + }, + "shortName": "UpdateDataStore" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.UpdateDataStoreRequest" + }, + { + "name": "data_store", + "type": "google.cloud.discoveryengine_v1beta.types.DataStore" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": 
"retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.DataStore", + "shortName": "update_data_store" + }, + "description": "Sample for UpdateDataStore", + "file": "discoveryengine_v1beta_generated_data_store_service_update_data_store_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_DataStoreService_UpdateDataStore_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_data_store_service_update_data_store_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient", + "shortName": "DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient.create_document", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.CreateDocument", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "CreateDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.CreateDocumentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "document", + "type": "google.cloud.discoveryengine_v1beta.types.Document" + }, + { + "name": "document_id", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.Document", + "shortName": "create_document" + }, + "description": "Sample for CreateDocument", + "file": "discoveryengine_v1beta_generated_document_service_create_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_DocumentService_CreateDocument_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_document_service_create_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient.create_document", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.CreateDocument", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "CreateDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.CreateDocumentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "document", + "type": "google.cloud.discoveryengine_v1beta.types.Document" + }, + { + "name": "document_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + 
"name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.Document", + "shortName": "create_document" + }, + "description": "Sample for CreateDocument", + "file": "discoveryengine_v1beta_generated_document_service_create_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_DocumentService_CreateDocument_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_document_service_create_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient", + "shortName": "DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient.delete_document", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.DeleteDocument", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "DeleteDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.DeleteDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_document" + }, + "description": "Sample for DeleteDocument", + "file": 
"discoveryengine_v1beta_generated_document_service_delete_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_DocumentService_DeleteDocument_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_document_service_delete_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient.delete_document", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.DeleteDocument", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "DeleteDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.DeleteDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_document" + }, + "description": "Sample for DeleteDocument", + "file": "discoveryengine_v1beta_generated_document_service_delete_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_DocumentService_DeleteDocument_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": 
"SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_document_service_delete_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient", + "shortName": "DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient.get_document", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.GetDocument", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "GetDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.GetDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.Document", + "shortName": "get_document" + }, + "description": "Sample for GetDocument", + "file": "discoveryengine_v1beta_generated_document_service_get_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_DocumentService_GetDocument_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + 
"type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_document_service_get_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient.get_document", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.GetDocument", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "GetDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.GetDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.Document", + "shortName": "get_document" + }, + "description": "Sample for GetDocument", + "file": "discoveryengine_v1beta_generated_document_service_get_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_DocumentService_GetDocument_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_document_service_get_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient", + "shortName": "DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient.import_documents", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.ImportDocuments", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "ImportDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.ImportDocumentsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "import_documents" + }, + "description": "Sample for ImportDocuments", + "file": "discoveryengine_v1beta_generated_document_service_import_documents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_DocumentService_ImportDocuments_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_document_service_import_documents_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient.import_documents", + "method": { + "fullName": 
"google.cloud.discoveryengine.v1beta.DocumentService.ImportDocuments", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "ImportDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.ImportDocumentsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "import_documents" + }, + "description": "Sample for ImportDocuments", + "file": "discoveryengine_v1beta_generated_document_service_import_documents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_DocumentService_ImportDocuments_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_document_service_import_documents_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient", + "shortName": "DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient.list_documents", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.ListDocuments", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "ListDocuments" + }, + "parameters": [ + { + 
"name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.ListDocumentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.services.document_service.pagers.ListDocumentsAsyncPager", + "shortName": "list_documents" + }, + "description": "Sample for ListDocuments", + "file": "discoveryengine_v1beta_generated_document_service_list_documents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_DocumentService_ListDocuments_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_document_service_list_documents_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient.list_documents", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.ListDocuments", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "ListDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.ListDocumentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.services.document_service.pagers.ListDocumentsPager", + "shortName": "list_documents" + }, + "description": "Sample for ListDocuments", + "file": "discoveryengine_v1beta_generated_document_service_list_documents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_DocumentService_ListDocuments_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_document_service_list_documents_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient", + "shortName": "DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient.purge_documents", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.PurgeDocuments", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "PurgeDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.PurgeDocumentsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.api_core.operation_async.AsyncOperation", + "shortName": "purge_documents" + }, + "description": "Sample for PurgeDocuments", + "file": "discoveryengine_v1beta_generated_document_service_purge_documents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_DocumentService_PurgeDocuments_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_document_service_purge_documents_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient.purge_documents", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.PurgeDocuments", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "PurgeDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.PurgeDocumentsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "purge_documents" + }, + "description": "Sample for PurgeDocuments", + "file": "discoveryengine_v1beta_generated_document_service_purge_documents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"discoveryengine_v1beta_generated_DocumentService_PurgeDocuments_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_document_service_purge_documents_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient", + "shortName": "DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient.update_document", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.UpdateDocument", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "UpdateDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.UpdateDocumentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.Document", + "shortName": "update_document" + }, + "description": "Sample for UpdateDocument", + "file": "discoveryengine_v1beta_generated_document_service_update_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_DocumentService_UpdateDocument_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_document_service_update_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient.update_document", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.UpdateDocument", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "UpdateDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.UpdateDocumentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.Document", + "shortName": "update_document" + }, + "description": "Sample for UpdateDocument", + "file": "discoveryengine_v1beta_generated_document_service_update_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_DocumentService_UpdateDocument_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"discoveryengine_v1beta_generated_document_service_update_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.EngineServiceAsyncClient", + "shortName": "EngineServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.EngineServiceAsyncClient.create_engine", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.EngineService.CreateEngine", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.EngineService", + "shortName": "EngineService" + }, + "shortName": "CreateEngine" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.CreateEngineRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "engine", + "type": "google.cloud.discoveryengine_v1beta.types.Engine" + }, + { + "name": "engine_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_engine" + }, + "description": "Sample for CreateEngine", + "file": "discoveryengine_v1beta_generated_engine_service_create_engine_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_EngineService_CreateEngine_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_engine_service_create_engine_async.py" + }, + { + "canonical": 
true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.EngineServiceClient", + "shortName": "EngineServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.EngineServiceClient.create_engine", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.EngineService.CreateEngine", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.EngineService", + "shortName": "EngineService" + }, + "shortName": "CreateEngine" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.CreateEngineRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "engine", + "type": "google.cloud.discoveryengine_v1beta.types.Engine" + }, + { + "name": "engine_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_engine" + }, + "description": "Sample for CreateEngine", + "file": "discoveryengine_v1beta_generated_engine_service_create_engine_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_EngineService_CreateEngine_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_engine_service_create_engine_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.EngineServiceAsyncClient", + "shortName": 
"EngineServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.EngineServiceAsyncClient.delete_engine", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.EngineService.DeleteEngine", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.EngineService", + "shortName": "EngineService" + }, + "shortName": "DeleteEngine" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.DeleteEngineRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_engine" + }, + "description": "Sample for DeleteEngine", + "file": "discoveryengine_v1beta_generated_engine_service_delete_engine_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_EngineService_DeleteEngine_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_engine_service_delete_engine_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.EngineServiceClient", + "shortName": "EngineServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.EngineServiceClient.delete_engine", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.EngineService.DeleteEngine", + "service": { + "fullName": 
"google.cloud.discoveryengine.v1beta.EngineService", + "shortName": "EngineService" + }, + "shortName": "DeleteEngine" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.DeleteEngineRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_engine" + }, + "description": "Sample for DeleteEngine", + "file": "discoveryengine_v1beta_generated_engine_service_delete_engine_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_EngineService_DeleteEngine_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_engine_service_delete_engine_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.EngineServiceAsyncClient", + "shortName": "EngineServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.EngineServiceAsyncClient.get_engine", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.EngineService.GetEngine", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.EngineService", + "shortName": "EngineService" + }, + "shortName": "GetEngine" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.GetEngineRequest" + }, + { + "name": "name", + 
"type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.Engine", + "shortName": "get_engine" + }, + "description": "Sample for GetEngine", + "file": "discoveryengine_v1beta_generated_engine_service_get_engine_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_EngineService_GetEngine_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_engine_service_get_engine_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.EngineServiceClient", + "shortName": "EngineServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.EngineServiceClient.get_engine", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.EngineService.GetEngine", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.EngineService", + "shortName": "EngineService" + }, + "shortName": "GetEngine" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.GetEngineRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.Engine", + "shortName": 
"get_engine" + }, + "description": "Sample for GetEngine", + "file": "discoveryengine_v1beta_generated_engine_service_get_engine_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_EngineService_GetEngine_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_engine_service_get_engine_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.EngineServiceAsyncClient", + "shortName": "EngineServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.EngineServiceAsyncClient.list_engines", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.EngineService.ListEngines", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.EngineService", + "shortName": "EngineService" + }, + "shortName": "ListEngines" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.ListEnginesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.services.engine_service.pagers.ListEnginesAsyncPager", + "shortName": "list_engines" + }, + "description": "Sample for ListEngines", + "file": "discoveryengine_v1beta_generated_engine_service_list_engines_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"discoveryengine_v1beta_generated_EngineService_ListEngines_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_engine_service_list_engines_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.EngineServiceClient", + "shortName": "EngineServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.EngineServiceClient.list_engines", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.EngineService.ListEngines", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.EngineService", + "shortName": "EngineService" + }, + "shortName": "ListEngines" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.ListEnginesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.services.engine_service.pagers.ListEnginesPager", + "shortName": "list_engines" + }, + "description": "Sample for ListEngines", + "file": "discoveryengine_v1beta_generated_engine_service_list_engines_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_EngineService_ListEngines_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_engine_service_list_engines_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.EngineServiceAsyncClient", + "shortName": "EngineServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.EngineServiceAsyncClient.update_engine", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.EngineService.UpdateEngine", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.EngineService", + "shortName": "EngineService" + }, + "shortName": "UpdateEngine" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.UpdateEngineRequest" + }, + { + "name": "engine", + "type": "google.cloud.discoveryengine_v1beta.types.Engine" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.Engine", + "shortName": "update_engine" + }, + "description": "Sample for UpdateEngine", + "file": "discoveryengine_v1beta_generated_engine_service_update_engine_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_EngineService_UpdateEngine_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + 
"end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_engine_service_update_engine_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.EngineServiceClient", + "shortName": "EngineServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.EngineServiceClient.update_engine", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.EngineService.UpdateEngine", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.EngineService", + "shortName": "EngineService" + }, + "shortName": "UpdateEngine" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.UpdateEngineRequest" + }, + { + "name": "engine", + "type": "google.cloud.discoveryengine_v1beta.types.Engine" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.Engine", + "shortName": "update_engine" + }, + "description": "Sample for UpdateEngine", + "file": "discoveryengine_v1beta_generated_engine_service_update_engine_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_EngineService_UpdateEngine_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + 
"title": "discoveryengine_v1beta_generated_engine_service_update_engine_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.RecommendationServiceAsyncClient", + "shortName": "RecommendationServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.RecommendationServiceAsyncClient.recommend", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.RecommendationService.Recommend", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.RecommendationService", + "shortName": "RecommendationService" + }, + "shortName": "Recommend" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.RecommendRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.RecommendResponse", + "shortName": "recommend" + }, + "description": "Sample for Recommend", + "file": "discoveryengine_v1beta_generated_recommendation_service_recommend_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_RecommendationService_Recommend_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_recommendation_service_recommend_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.RecommendationServiceClient", + 
"shortName": "RecommendationServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.RecommendationServiceClient.recommend", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.RecommendationService.Recommend", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.RecommendationService", + "shortName": "RecommendationService" + }, + "shortName": "Recommend" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.RecommendRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.RecommendResponse", + "shortName": "recommend" + }, + "description": "Sample for Recommend", + "file": "discoveryengine_v1beta_generated_recommendation_service_recommend_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_RecommendationService_Recommend_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_recommendation_service_recommend_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceAsyncClient.create_schema", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService.CreateSchema", + "service": { + 
"fullName": "google.cloud.discoveryengine.v1beta.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "CreateSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.CreateSchemaRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "schema", + "type": "google.cloud.discoveryengine_v1beta.types.Schema" + }, + { + "name": "schema_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_schema" + }, + "description": "Sample for CreateSchema", + "file": "discoveryengine_v1beta_generated_schema_service_create_schema_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_SchemaService_CreateSchema_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_schema_service_create_schema_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceClient.create_schema", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService.CreateSchema", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "CreateSchema" + }, 
+ "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.CreateSchemaRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "schema", + "type": "google.cloud.discoveryengine_v1beta.types.Schema" + }, + { + "name": "schema_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_schema" + }, + "description": "Sample for CreateSchema", + "file": "discoveryengine_v1beta_generated_schema_service_create_schema_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_SchemaService_CreateSchema_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_schema_service_create_schema_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceAsyncClient.delete_schema", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService.DeleteSchema", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "DeleteSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.DeleteSchemaRequest" 
+ }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_schema" + }, + "description": "Sample for DeleteSchema", + "file": "discoveryengine_v1beta_generated_schema_service_delete_schema_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_SchemaService_DeleteSchema_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_schema_service_delete_schema_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceClient.delete_schema", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService.DeleteSchema", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "DeleteSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.DeleteSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.api_core.operation.Operation", + "shortName": "delete_schema" + }, + "description": "Sample for DeleteSchema", + "file": "discoveryengine_v1beta_generated_schema_service_delete_schema_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_SchemaService_DeleteSchema_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_schema_service_delete_schema_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceAsyncClient.get_schema", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService.GetSchema", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "GetSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.GetSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.Schema", + "shortName": "get_schema" + }, + "description": "Sample for GetSchema", + "file": "discoveryengine_v1beta_generated_schema_service_get_schema_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + 
"regionTag": "discoveryengine_v1beta_generated_SchemaService_GetSchema_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_schema_service_get_schema_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceClient.get_schema", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService.GetSchema", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "GetSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.GetSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.Schema", + "shortName": "get_schema" + }, + "description": "Sample for GetSchema", + "file": "discoveryengine_v1beta_generated_schema_service_get_schema_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_SchemaService_GetSchema_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 
41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_schema_service_get_schema_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceAsyncClient.list_schemas", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService.ListSchemas", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "ListSchemas" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.ListSchemasRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.services.schema_service.pagers.ListSchemasAsyncPager", + "shortName": "list_schemas" + }, + "description": "Sample for ListSchemas", + "file": "discoveryengine_v1beta_generated_schema_service_list_schemas_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_SchemaService_ListSchemas_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"discoveryengine_v1beta_generated_schema_service_list_schemas_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceClient.list_schemas", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService.ListSchemas", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "ListSchemas" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.ListSchemasRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.services.schema_service.pagers.ListSchemasPager", + "shortName": "list_schemas" + }, + "description": "Sample for ListSchemas", + "file": "discoveryengine_v1beta_generated_schema_service_list_schemas_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_SchemaService_ListSchemas_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_schema_service_list_schemas_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceAsyncClient", + "shortName": 
"SchemaServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceAsyncClient.update_schema", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService.UpdateSchema", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "UpdateSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.UpdateSchemaRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_schema" + }, + "description": "Sample for UpdateSchema", + "file": "discoveryengine_v1beta_generated_schema_service_update_schema_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_SchemaService_UpdateSchema_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_schema_service_update_schema_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceClient.update_schema", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService.UpdateSchema", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.SchemaService", + "shortName": 
"SchemaService" + }, + "shortName": "UpdateSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.UpdateSchemaRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_schema" + }, + "description": "Sample for UpdateSchema", + "file": "discoveryengine_v1beta_generated_schema_service_update_schema_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_SchemaService_UpdateSchema_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_schema_service_update_schema_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.SearchServiceAsyncClient", + "shortName": "SearchServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.SearchServiceAsyncClient.search", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.SearchService.Search", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.SearchService", + "shortName": "SearchService" + }, + "shortName": "Search" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.SearchRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + 
"type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.services.search_service.pagers.SearchAsyncPager", + "shortName": "search" + }, + "description": "Sample for Search", + "file": "discoveryengine_v1beta_generated_search_service_search_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_SearchService_Search_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_search_service_search_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.SearchServiceClient", + "shortName": "SearchServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.SearchServiceClient.search", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.SearchService.Search", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.SearchService", + "shortName": "SearchService" + }, + "shortName": "Search" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.SearchRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.services.search_service.pagers.SearchPager", + "shortName": "search" + }, + "description": "Sample for Search", + "file": "discoveryengine_v1beta_generated_search_service_search_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", 
+ "regionTag": "discoveryengine_v1beta_generated_SearchService_Search_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_search_service_search_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.ServingConfigServiceAsyncClient", + "shortName": "ServingConfigServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.ServingConfigServiceAsyncClient.get_serving_config", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.ServingConfigService.GetServingConfig", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.ServingConfigService", + "shortName": "ServingConfigService" + }, + "shortName": "GetServingConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.GetServingConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.ServingConfig", + "shortName": "get_serving_config" + }, + "description": "Sample for GetServingConfig", + "file": "discoveryengine_v1beta_generated_serving_config_service_get_serving_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_ServingConfigService_GetServingConfig_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { 
+ "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_serving_config_service_get_serving_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.ServingConfigServiceClient", + "shortName": "ServingConfigServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.ServingConfigServiceClient.get_serving_config", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.ServingConfigService.GetServingConfig", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.ServingConfigService", + "shortName": "ServingConfigService" + }, + "shortName": "GetServingConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.GetServingConfigRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -1106,22 +5463,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1beta.types.Conversation", - "shortName": "update_conversation" + "resultType": "google.cloud.discoveryengine_v1beta.types.ServingConfig", + "shortName": "get_serving_config" }, - "description": "Sample for UpdateConversation", - "file": "discoveryengine_v1beta_generated_conversational_search_service_update_conversation_sync.py", + "description": "Sample for GetServingConfig", + "file": "discoveryengine_v1beta_generated_serving_config_service_get_serving_config_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_ConversationalSearchService_UpdateConversation_sync", + 
"regionTag": "discoveryengine_v1beta_generated_ServingConfigService_GetServingConfig_sync", "segments": [ { - "end": 50, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1131,57 +5488,49 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_conversational_search_service_update_conversation_sync.py" + "title": "discoveryengine_v1beta_generated_serving_config_service_get_serving_config_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient", - "shortName": "DocumentServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1beta.ServingConfigServiceAsyncClient", + "shortName": "ServingConfigServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient.create_document", + "fullName": "google.cloud.discoveryengine_v1beta.ServingConfigServiceAsyncClient.list_serving_configs", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.CreateDocument", + "fullName": "google.cloud.discoveryengine.v1beta.ServingConfigService.ListServingConfigs", "service": { - "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1beta.ServingConfigService", + "shortName": "ServingConfigService" }, - "shortName": "CreateDocument" + "shortName": "ListServingConfigs" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.CreateDocumentRequest" + "type": "google.cloud.discoveryengine_v1beta.types.ListServingConfigsRequest" }, { "name": "parent", "type": "str" }, - { 
- "name": "document", - "type": "google.cloud.discoveryengine_v1beta.types.Document" - }, - { - "name": "document_id", - "type": "str" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1195,14 +5544,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1beta.types.Document", - "shortName": "create_document" + "resultType": "google.cloud.discoveryengine_v1beta.services.serving_config_service.pagers.ListServingConfigsAsyncPager", + "shortName": "list_serving_configs" }, - "description": "Sample for CreateDocument", - "file": "discoveryengine_v1beta_generated_document_service_create_document_async.py", + "description": "Sample for ListServingConfigs", + "file": "discoveryengine_v1beta_generated_serving_config_service_list_serving_configs_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_DocumentService_CreateDocument_async", + "regionTag": "discoveryengine_v1beta_generated_ServingConfigService_ListServingConfigs_async", "segments": [ { "end": 52, @@ -1220,56 +5569,48 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 50, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_document_service_create_document_async.py" + "title": "discoveryengine_v1beta_generated_serving_config_service_list_serving_configs_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient", - "shortName": "DocumentServiceClient" + "fullName": "google.cloud.discoveryengine_v1beta.ServingConfigServiceClient", + "shortName": "ServingConfigServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient.create_document", + "fullName": 
"google.cloud.discoveryengine_v1beta.ServingConfigServiceClient.list_serving_configs", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.CreateDocument", + "fullName": "google.cloud.discoveryengine.v1beta.ServingConfigService.ListServingConfigs", "service": { - "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1beta.ServingConfigService", + "shortName": "ServingConfigService" }, - "shortName": "CreateDocument" + "shortName": "ListServingConfigs" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.CreateDocumentRequest" + "type": "google.cloud.discoveryengine_v1beta.types.ListServingConfigsRequest" }, { "name": "parent", "type": "str" }, - { - "name": "document", - "type": "google.cloud.discoveryengine_v1beta.types.Document" - }, - { - "name": "document_id", - "type": "str" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1283,14 +5624,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1beta.types.Document", - "shortName": "create_document" + "resultType": "google.cloud.discoveryengine_v1beta.services.serving_config_service.pagers.ListServingConfigsPager", + "shortName": "list_serving_configs" }, - "description": "Sample for CreateDocument", - "file": "discoveryengine_v1beta_generated_document_service_create_document_sync.py", + "description": "Sample for ListServingConfigs", + "file": "discoveryengine_v1beta_generated_serving_config_service_list_serving_configs_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_DocumentService_CreateDocument_sync", + "regionTag": "discoveryengine_v1beta_generated_ServingConfigService_ListServingConfigs_sync", "segments": [ { "end": 52, @@ -1308,48 +5649,52 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": 
"REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 50, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_document_service_create_document_sync.py" + "title": "discoveryengine_v1beta_generated_serving_config_service_list_serving_configs_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient", - "shortName": "DocumentServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1beta.ServingConfigServiceAsyncClient", + "shortName": "ServingConfigServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient.delete_document", + "fullName": "google.cloud.discoveryengine_v1beta.ServingConfigServiceAsyncClient.update_serving_config", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.DeleteDocument", + "fullName": "google.cloud.discoveryengine.v1beta.ServingConfigService.UpdateServingConfig", "service": { - "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1beta.ServingConfigService", + "shortName": "ServingConfigService" }, - "shortName": "DeleteDocument" + "shortName": "UpdateServingConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.DeleteDocumentRequest" + "type": "google.cloud.discoveryengine_v1beta.types.UpdateServingConfigRequest" }, { - "name": "name", - "type": "str" + "name": "serving_config", + "type": "google.cloud.discoveryengine_v1beta.types.ServingConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -1364,21 +5709,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_document" + "resultType": 
"google.cloud.discoveryengine_v1beta.types.ServingConfig", + "shortName": "update_serving_config" }, - "description": "Sample for DeleteDocument", - "file": "discoveryengine_v1beta_generated_document_service_delete_document_async.py", + "description": "Sample for UpdateServingConfig", + "file": "discoveryengine_v1beta_generated_serving_config_service_update_serving_config_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_DocumentService_DeleteDocument_async", + "regionTag": "discoveryengine_v1beta_generated_ServingConfigService_UpdateServingConfig_async", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, @@ -1388,45 +5734,51 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 46, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_document_service_delete_document_async.py" + "title": "discoveryengine_v1beta_generated_serving_config_service_update_serving_config_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient", - "shortName": "DocumentServiceClient" + "fullName": "google.cloud.discoveryengine_v1beta.ServingConfigServiceClient", + "shortName": "ServingConfigServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient.delete_document", + "fullName": "google.cloud.discoveryengine_v1beta.ServingConfigServiceClient.update_serving_config", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.DeleteDocument", + "fullName": "google.cloud.discoveryengine.v1beta.ServingConfigService.UpdateServingConfig", "service": { - "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", - 
"shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1beta.ServingConfigService", + "shortName": "ServingConfigService" }, - "shortName": "DeleteDocument" + "shortName": "UpdateServingConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.DeleteDocumentRequest" + "type": "google.cloud.discoveryengine_v1beta.types.UpdateServingConfigRequest" }, { - "name": "name", - "type": "str" + "name": "serving_config", + "type": "google.cloud.discoveryengine_v1beta.types.ServingConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -1441,21 +5793,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_document" + "resultType": "google.cloud.discoveryengine_v1beta.types.ServingConfig", + "shortName": "update_serving_config" }, - "description": "Sample for DeleteDocument", - "file": "discoveryengine_v1beta_generated_document_service_delete_document_sync.py", + "description": "Sample for UpdateServingConfig", + "file": "discoveryengine_v1beta_generated_serving_config_service_update_serving_config_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_DocumentService_DeleteDocument_sync", + "regionTag": "discoveryengine_v1beta_generated_ServingConfigService_UpdateServingConfig_sync", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, @@ -1465,46 +5818,44 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 46, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_document_service_delete_document_sync.py" + "title": "discoveryengine_v1beta_generated_serving_config_service_update_serving_config_sync.py" }, { 
"canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient", - "shortName": "DocumentServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient.get_document", + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient.batch_create_target_sites", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.GetDocument", + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService.BatchCreateTargetSites", "service": { - "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "GetDocument" + "shortName": "BatchCreateTargetSites" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.GetDocumentRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.discoveryengine_v1beta.types.BatchCreateTargetSitesRequest" }, { "name": "retry", @@ -1519,22 +5870,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1beta.types.Document", - "shortName": "get_document" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "batch_create_target_sites" }, - "description": "Sample for GetDocument", - "file": "discoveryengine_v1beta_generated_document_service_get_document_async.py", + "description": "Sample for BatchCreateTargetSites", + "file": "discoveryengine_v1beta_generated_site_search_engine_service_batch_create_target_sites_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_DocumentService_GetDocument_async", 
+ "regionTag": "discoveryengine_v1beta_generated_SiteSearchEngineService_BatchCreateTargetSites_async", "segments": [ { - "end": 51, + "end": 60, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 60, "start": 27, "type": "SHORT" }, @@ -1544,47 +5895,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 57, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 61, + "start": 58, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_document_service_get_document_async.py" + "title": "discoveryengine_v1beta_generated_site_search_engine_service_batch_create_target_sites_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient", - "shortName": "DocumentServiceClient" + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient.get_document", + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceClient.batch_create_target_sites", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.GetDocument", + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService.BatchCreateTargetSites", "service": { - "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "GetDocument" + "shortName": "BatchCreateTargetSites" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.GetDocumentRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.discoveryengine_v1beta.types.BatchCreateTargetSitesRequest" }, { "name": 
"retry", @@ -1599,22 +5946,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1beta.types.Document", - "shortName": "get_document" + "resultType": "google.api_core.operation.Operation", + "shortName": "batch_create_target_sites" }, - "description": "Sample for GetDocument", - "file": "discoveryengine_v1beta_generated_document_service_get_document_sync.py", + "description": "Sample for BatchCreateTargetSites", + "file": "discoveryengine_v1beta_generated_site_search_engine_service_batch_create_target_sites_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_DocumentService_GetDocument_sync", + "regionTag": "discoveryengine_v1beta_generated_SiteSearchEngineService_BatchCreateTargetSites_sync", "segments": [ { - "end": 51, + "end": 60, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 60, "start": 27, "type": "SHORT" }, @@ -1624,44 +5971,44 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 57, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 61, + "start": 58, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_document_service_get_document_sync.py" + "title": "discoveryengine_v1beta_generated_site_search_engine_service_batch_create_target_sites_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient", - "shortName": "DocumentServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient.import_documents", + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient.batch_verify_target_sites", "method": { - 
"fullName": "google.cloud.discoveryengine.v1beta.DocumentService.ImportDocuments", + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService.BatchVerifyTargetSites", "service": { - "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "ImportDocuments" + "shortName": "BatchVerifyTargetSites" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.ImportDocumentsRequest" + "type": "google.cloud.discoveryengine_v1beta.types.BatchVerifyTargetSitesRequest" }, { "name": "retry", @@ -1677,13 +6024,13 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "import_documents" + "shortName": "batch_verify_target_sites" }, - "description": "Sample for ImportDocuments", - "file": "discoveryengine_v1beta_generated_document_service_import_documents_async.py", + "description": "Sample for BatchVerifyTargetSites", + "file": "discoveryengine_v1beta_generated_site_search_engine_service_batch_verify_target_sites_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_DocumentService_ImportDocuments_async", + "regionTag": "discoveryengine_v1beta_generated_SiteSearchEngineService_BatchVerifyTargetSites_async", "segments": [ { "end": 55, @@ -1716,28 +6063,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_document_service_import_documents_async.py" + "title": "discoveryengine_v1beta_generated_site_search_engine_service_batch_verify_target_sites_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient", - "shortName": "DocumentServiceClient" + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceClient", + "shortName": 
"SiteSearchEngineServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient.import_documents", + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceClient.batch_verify_target_sites", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.ImportDocuments", + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService.BatchVerifyTargetSites", "service": { - "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "ImportDocuments" + "shortName": "BatchVerifyTargetSites" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.ImportDocumentsRequest" + "type": "google.cloud.discoveryengine_v1beta.types.BatchVerifyTargetSitesRequest" }, { "name": "retry", @@ -1753,13 +6100,13 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "import_documents" + "shortName": "batch_verify_target_sites" }, - "description": "Sample for ImportDocuments", - "file": "discoveryengine_v1beta_generated_document_service_import_documents_sync.py", + "description": "Sample for BatchVerifyTargetSites", + "file": "discoveryengine_v1beta_generated_site_search_engine_service_batch_verify_target_sites_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_DocumentService_ImportDocuments_sync", + "regionTag": "discoveryengine_v1beta_generated_SiteSearchEngineService_BatchVerifyTargetSites_sync", "segments": [ { "end": 55, @@ -1792,34 +6139,38 @@ "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_document_service_import_documents_sync.py" + "title": "discoveryengine_v1beta_generated_site_search_engine_service_batch_verify_target_sites_sync.py" }, { "canonical": true, "clientMethod": { "async": true, 
"client": { - "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient", - "shortName": "DocumentServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient.list_documents", + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient.create_target_site", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.ListDocuments", + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService.CreateTargetSite", "service": { - "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "ListDocuments" + "shortName": "CreateTargetSite" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.ListDocumentsRequest" + "type": "google.cloud.discoveryengine_v1beta.types.CreateTargetSiteRequest" }, { "name": "parent", "type": "str" }, + { + "name": "target_site", + "type": "google.cloud.discoveryengine_v1beta.types.TargetSite" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1833,22 +6184,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1beta.services.document_service.pagers.ListDocumentsAsyncPager", - "shortName": "list_documents" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_target_site" }, - "description": "Sample for ListDocuments", - "file": "discoveryengine_v1beta_generated_document_service_list_documents_async.py", + "description": "Sample for CreateTargetSite", + "file": "discoveryengine_v1beta_generated_site_search_engine_service_create_target_site_async.py", "language": "PYTHON", "origin": 
"API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_DocumentService_ListDocuments_async", + "regionTag": "discoveryengine_v1beta_generated_SiteSearchEngineService_CreateTargetSite_async", "segments": [ { - "end": 52, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 59, "start": 27, "type": "SHORT" }, @@ -1858,48 +6209,52 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_document_service_list_documents_async.py" + "title": "discoveryengine_v1beta_generated_site_search_engine_service_create_target_site_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient", - "shortName": "DocumentServiceClient" + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient.list_documents", + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceClient.create_target_site", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.ListDocuments", + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService.CreateTargetSite", "service": { - "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "ListDocuments" + "shortName": "CreateTargetSite" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.ListDocumentsRequest" + "type": 
"google.cloud.discoveryengine_v1beta.types.CreateTargetSiteRequest" }, { "name": "parent", "type": "str" }, + { + "name": "target_site", + "type": "google.cloud.discoveryengine_v1beta.types.TargetSite" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1913,22 +6268,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1beta.services.document_service.pagers.ListDocumentsPager", - "shortName": "list_documents" + "resultType": "google.api_core.operation.Operation", + "shortName": "create_target_site" }, - "description": "Sample for ListDocuments", - "file": "discoveryengine_v1beta_generated_document_service_list_documents_sync.py", + "description": "Sample for CreateTargetSite", + "file": "discoveryengine_v1beta_generated_site_search_engine_service_create_target_site_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_DocumentService_ListDocuments_sync", + "regionTag": "discoveryengine_v1beta_generated_SiteSearchEngineService_CreateTargetSite_sync", "segments": [ { - "end": 52, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 59, "start": 27, "type": "SHORT" }, @@ -1938,44 +6293,48 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_document_service_list_documents_sync.py" + "title": "discoveryengine_v1beta_generated_site_search_engine_service_create_target_site_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient", - "shortName": "DocumentServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient", + 
"shortName": "SiteSearchEngineServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient.purge_documents", + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient.delete_target_site", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.PurgeDocuments", + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService.DeleteTargetSite", "service": { - "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "PurgeDocuments" + "shortName": "DeleteTargetSite" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.PurgeDocumentsRequest" + "type": "google.cloud.discoveryengine_v1beta.types.DeleteTargetSiteRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -1991,21 +6350,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "purge_documents" + "shortName": "delete_target_site" }, - "description": "Sample for PurgeDocuments", - "file": "discoveryengine_v1beta_generated_document_service_purge_documents_async.py", + "description": "Sample for DeleteTargetSite", + "file": "discoveryengine_v1beta_generated_site_search_engine_service_delete_target_site_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_DocumentService_PurgeDocuments_async", + "regionTag": "discoveryengine_v1beta_generated_SiteSearchEngineService_DeleteTargetSite_async", "segments": [ { - "end": 56, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 55, "start": 27, "type": "SHORT" }, @@ -2015,43 +6374,47 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 
52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_document_service_purge_documents_async.py" + "title": "discoveryengine_v1beta_generated_site_search_engine_service_delete_target_site_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient", - "shortName": "DocumentServiceClient" + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient.purge_documents", + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceClient.delete_target_site", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.PurgeDocuments", + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService.DeleteTargetSite", "service": { - "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "PurgeDocuments" + "shortName": "DeleteTargetSite" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.PurgeDocumentsRequest" + "type": "google.cloud.discoveryengine_v1beta.types.DeleteTargetSiteRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -2067,21 +6430,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "purge_documents" + "shortName": "delete_target_site" }, - "description": "Sample for PurgeDocuments", - "file": "discoveryengine_v1beta_generated_document_service_purge_documents_sync.py", + "description": "Sample for DeleteTargetSite", + "file": 
"discoveryengine_v1beta_generated_site_search_engine_service_delete_target_site_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_DocumentService_PurgeDocuments_sync", + "regionTag": "discoveryengine_v1beta_generated_SiteSearchEngineService_DeleteTargetSite_sync", "segments": [ { - "end": 56, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 55, "start": 27, "type": "SHORT" }, @@ -2091,44 +6454,44 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_document_service_purge_documents_sync.py" + "title": "discoveryengine_v1beta_generated_site_search_engine_service_delete_target_site_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient", - "shortName": "DocumentServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient.update_document", + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient.disable_advanced_site_search", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.UpdateDocument", + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService.DisableAdvancedSiteSearch", "service": { - "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "UpdateDocument" + "shortName": 
"DisableAdvancedSiteSearch" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.UpdateDocumentRequest" + "type": "google.cloud.discoveryengine_v1beta.types.DisableAdvancedSiteSearchRequest" }, { "name": "retry", @@ -2143,22 +6506,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1beta.types.Document", - "shortName": "update_document" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "disable_advanced_site_search" }, - "description": "Sample for UpdateDocument", - "file": "discoveryengine_v1beta_generated_document_service_update_document_async.py", + "description": "Sample for DisableAdvancedSiteSearch", + "file": "discoveryengine_v1beta_generated_site_search_engine_service_disable_advanced_site_search_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_DocumentService_UpdateDocument_async", + "regionTag": "discoveryengine_v1beta_generated_SiteSearchEngineService_DisableAdvancedSiteSearch_async", "segments": [ { - "end": 50, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 55, "start": 27, "type": "SHORT" }, @@ -2168,43 +6531,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_document_service_update_document_async.py" + "title": "discoveryengine_v1beta_generated_site_search_engine_service_disable_advanced_site_search_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient", - "shortName": "DocumentServiceClient" + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceClient", + "shortName": 
"SiteSearchEngineServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient.update_document", + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceClient.disable_advanced_site_search", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.UpdateDocument", + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService.DisableAdvancedSiteSearch", "service": { - "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", - "shortName": "DocumentService" + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "UpdateDocument" + "shortName": "DisableAdvancedSiteSearch" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.UpdateDocumentRequest" + "type": "google.cloud.discoveryengine_v1beta.types.DisableAdvancedSiteSearchRequest" }, { "name": "retry", @@ -2219,22 +6582,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1beta.types.Document", - "shortName": "update_document" + "resultType": "google.api_core.operation.Operation", + "shortName": "disable_advanced_site_search" }, - "description": "Sample for UpdateDocument", - "file": "discoveryengine_v1beta_generated_document_service_update_document_sync.py", + "description": "Sample for DisableAdvancedSiteSearch", + "file": "discoveryengine_v1beta_generated_site_search_engine_service_disable_advanced_site_search_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_DocumentService_UpdateDocument_sync", + "regionTag": "discoveryengine_v1beta_generated_SiteSearchEngineService_DisableAdvancedSiteSearch_sync", "segments": [ { - "end": 50, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 55, "start": 27, "type": "SHORT" }, @@ -2244,44 +6607,44 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + 
"end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_document_service_update_document_sync.py" + "title": "discoveryengine_v1beta_generated_site_search_engine_service_disable_advanced_site_search_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1beta.RecommendationServiceAsyncClient", - "shortName": "RecommendationServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.RecommendationServiceAsyncClient.recommend", + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient.enable_advanced_site_search", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.RecommendationService.Recommend", + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService.EnableAdvancedSiteSearch", "service": { - "fullName": "google.cloud.discoveryengine.v1beta.RecommendationService", - "shortName": "RecommendationService" + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "Recommend" + "shortName": "EnableAdvancedSiteSearch" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.RecommendRequest" + "type": "google.cloud.discoveryengine_v1beta.types.EnableAdvancedSiteSearchRequest" }, { "name": "retry", @@ -2296,22 +6659,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1beta.types.RecommendResponse", - "shortName": "recommend" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": 
"enable_advanced_site_search" }, - "description": "Sample for Recommend", - "file": "discoveryengine_v1beta_generated_recommendation_service_recommend_async.py", + "description": "Sample for EnableAdvancedSiteSearch", + "file": "discoveryengine_v1beta_generated_site_search_engine_service_enable_advanced_site_search_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_RecommendationService_Recommend_async", + "regionTag": "discoveryengine_v1beta_generated_SiteSearchEngineService_EnableAdvancedSiteSearch_async", "segments": [ { - "end": 56, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 55, "start": 27, "type": "SHORT" }, @@ -2321,43 +6684,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_recommendation_service_recommend_async.py" + "title": "discoveryengine_v1beta_generated_site_search_engine_service_enable_advanced_site_search_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1beta.RecommendationServiceClient", - "shortName": "RecommendationServiceClient" + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.RecommendationServiceClient.recommend", + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceClient.enable_advanced_site_search", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.RecommendationService.Recommend", + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService.EnableAdvancedSiteSearch", "service": { - "fullName": 
"google.cloud.discoveryengine.v1beta.RecommendationService", - "shortName": "RecommendationService" + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "Recommend" + "shortName": "EnableAdvancedSiteSearch" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.RecommendRequest" + "type": "google.cloud.discoveryengine_v1beta.types.EnableAdvancedSiteSearchRequest" }, { "name": "retry", @@ -2372,22 +6735,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1beta.types.RecommendResponse", - "shortName": "recommend" + "resultType": "google.api_core.operation.Operation", + "shortName": "enable_advanced_site_search" }, - "description": "Sample for Recommend", - "file": "discoveryengine_v1beta_generated_recommendation_service_recommend_sync.py", + "description": "Sample for EnableAdvancedSiteSearch", + "file": "discoveryengine_v1beta_generated_site_search_engine_service_enable_advanced_site_search_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_RecommendationService_Recommend_sync", + "regionTag": "discoveryengine_v1beta_generated_SiteSearchEngineService_EnableAdvancedSiteSearch_sync", "segments": [ { - "end": 56, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 55, "start": 27, "type": "SHORT" }, @@ -2397,56 +6760,44 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_recommendation_service_recommend_sync.py" + "title": "discoveryengine_v1beta_generated_site_search_engine_service_enable_advanced_site_search_sync.py" }, { "canonical": true, 
"clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceAsyncClient", - "shortName": "SchemaServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceAsyncClient.create_schema", + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient.fetch_domain_verification_status", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.SchemaService.CreateSchema", + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService.FetchDomainVerificationStatus", "service": { - "fullName": "google.cloud.discoveryengine.v1beta.SchemaService", - "shortName": "SchemaService" + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "CreateSchema" + "shortName": "FetchDomainVerificationStatus" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.CreateSchemaRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "schema", - "type": "google.cloud.discoveryengine_v1beta.types.Schema" - }, - { - "name": "schema_id", - "type": "str" + "type": "google.cloud.discoveryengine_v1beta.types.FetchDomainVerificationStatusRequest" }, { "name": "retry", @@ -2461,22 +6812,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_schema" + "resultType": "google.cloud.discoveryengine_v1beta.services.site_search_engine_service.pagers.FetchDomainVerificationStatusAsyncPager", + "shortName": "fetch_domain_verification_status" }, - "description": "Sample for CreateSchema", - "file": "discoveryengine_v1beta_generated_schema_service_create_schema_async.py", + "description": "Sample for FetchDomainVerificationStatus", + "file": 
"discoveryengine_v1beta_generated_site_search_engine_service_fetch_domain_verification_status_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_SchemaService_CreateSchema_async", + "regionTag": "discoveryengine_v1beta_generated_SiteSearchEngineService_FetchDomainVerificationStatus_async", "segments": [ { - "end": 56, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2486,55 +6837,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, - { - "end": 57, - "start": 54, + { + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_schema_service_create_schema_async.py" + "title": "discoveryengine_v1beta_generated_site_search_engine_service_fetch_domain_verification_status_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceClient", - "shortName": "SchemaServiceClient" + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceClient.create_schema", + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceClient.fetch_domain_verification_status", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.SchemaService.CreateSchema", + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService.FetchDomainVerificationStatus", "service": { - "fullName": "google.cloud.discoveryengine.v1beta.SchemaService", - "shortName": "SchemaService" + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "CreateSchema" + "shortName": 
"FetchDomainVerificationStatus" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.CreateSchemaRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "schema", - "type": "google.cloud.discoveryengine_v1beta.types.Schema" - }, - { - "name": "schema_id", - "type": "str" + "type": "google.cloud.discoveryengine_v1beta.types.FetchDomainVerificationStatusRequest" }, { "name": "retry", @@ -2549,22 +6888,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_schema" + "resultType": "google.cloud.discoveryengine_v1beta.services.site_search_engine_service.pagers.FetchDomainVerificationStatusPager", + "shortName": "fetch_domain_verification_status" }, - "description": "Sample for CreateSchema", - "file": "discoveryengine_v1beta_generated_schema_service_create_schema_sync.py", + "description": "Sample for FetchDomainVerificationStatus", + "file": "discoveryengine_v1beta_generated_site_search_engine_service_fetch_domain_verification_status_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_SchemaService_CreateSchema_sync", + "regionTag": "discoveryengine_v1beta_generated_SiteSearchEngineService_FetchDomainVerificationStatus_sync", "segments": [ { - "end": 56, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2574,44 +6913,44 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_schema_service_create_schema_sync.py" + "title": "discoveryengine_v1beta_generated_site_search_engine_service_fetch_domain_verification_status_sync.py" }, { "canonical": true, 
"clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceAsyncClient", - "shortName": "SchemaServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceAsyncClient.delete_schema", + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient.get_site_search_engine", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.SchemaService.DeleteSchema", + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService.GetSiteSearchEngine", "service": { - "fullName": "google.cloud.discoveryengine.v1beta.SchemaService", - "shortName": "SchemaService" + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "DeleteSchema" + "shortName": "GetSiteSearchEngine" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.DeleteSchemaRequest" + "type": "google.cloud.discoveryengine_v1beta.types.GetSiteSearchEngineRequest" }, { "name": "name", @@ -2630,22 +6969,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_schema" + "resultType": "google.cloud.discoveryengine_v1beta.types.SiteSearchEngine", + "shortName": "get_site_search_engine" }, - "description": "Sample for DeleteSchema", - "file": "discoveryengine_v1beta_generated_schema_service_delete_schema_async.py", + "description": "Sample for GetSiteSearchEngine", + "file": "discoveryengine_v1beta_generated_site_search_engine_service_get_site_search_engine_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_SchemaService_DeleteSchema_async", + "regionTag": 
"discoveryengine_v1beta_generated_SiteSearchEngineService_GetSiteSearchEngine_async", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2660,38 +6999,38 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_schema_service_delete_schema_async.py" + "title": "discoveryengine_v1beta_generated_site_search_engine_service_get_site_search_engine_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceClient", - "shortName": "SchemaServiceClient" + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceClient.delete_schema", + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceClient.get_site_search_engine", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.SchemaService.DeleteSchema", + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService.GetSiteSearchEngine", "service": { - "fullName": "google.cloud.discoveryengine.v1beta.SchemaService", - "shortName": "SchemaService" + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "DeleteSchema" + "shortName": "GetSiteSearchEngine" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.DeleteSchemaRequest" + "type": "google.cloud.discoveryengine_v1beta.types.GetSiteSearchEngineRequest" }, { "name": "name", @@ -2710,22 +7049,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_schema" + "resultType": 
"google.cloud.discoveryengine_v1beta.types.SiteSearchEngine", + "shortName": "get_site_search_engine" }, - "description": "Sample for DeleteSchema", - "file": "discoveryengine_v1beta_generated_schema_service_delete_schema_sync.py", + "description": "Sample for GetSiteSearchEngine", + "file": "discoveryengine_v1beta_generated_site_search_engine_service_get_site_search_engine_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_SchemaService_DeleteSchema_sync", + "regionTag": "discoveryengine_v1beta_generated_SiteSearchEngineService_GetSiteSearchEngine_sync", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2740,39 +7079,39 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_schema_service_delete_schema_sync.py" + "title": "discoveryengine_v1beta_generated_site_search_engine_service_get_site_search_engine_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceAsyncClient", - "shortName": "SchemaServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceAsyncClient.get_schema", + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient.get_target_site", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.SchemaService.GetSchema", + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService.GetTargetSite", "service": { - "fullName": "google.cloud.discoveryengine.v1beta.SchemaService", - "shortName": "SchemaService" + "fullName": 
"google.cloud.discoveryengine.v1beta.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "GetSchema" + "shortName": "GetTargetSite" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.GetSchemaRequest" + "type": "google.cloud.discoveryengine_v1beta.types.GetTargetSiteRequest" }, { "name": "name", @@ -2791,14 +7130,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1beta.types.Schema", - "shortName": "get_schema" + "resultType": "google.cloud.discoveryengine_v1beta.types.TargetSite", + "shortName": "get_target_site" }, - "description": "Sample for GetSchema", - "file": "discoveryengine_v1beta_generated_schema_service_get_schema_async.py", + "description": "Sample for GetTargetSite", + "file": "discoveryengine_v1beta_generated_site_search_engine_service_get_target_site_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_SchemaService_GetSchema_async", + "regionTag": "discoveryengine_v1beta_generated_SiteSearchEngineService_GetTargetSite_async", "segments": [ { "end": 51, @@ -2831,28 +7170,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_schema_service_get_schema_async.py" + "title": "discoveryengine_v1beta_generated_site_search_engine_service_get_target_site_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceClient", - "shortName": "SchemaServiceClient" + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceClient.get_schema", + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceClient.get_target_site", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.SchemaService.GetSchema", + "fullName": 
"google.cloud.discoveryengine.v1beta.SiteSearchEngineService.GetTargetSite", "service": { - "fullName": "google.cloud.discoveryengine.v1beta.SchemaService", - "shortName": "SchemaService" + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "GetSchema" + "shortName": "GetTargetSite" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.GetSchemaRequest" + "type": "google.cloud.discoveryengine_v1beta.types.GetTargetSiteRequest" }, { "name": "name", @@ -2871,14 +7210,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1beta.types.Schema", - "shortName": "get_schema" + "resultType": "google.cloud.discoveryengine_v1beta.types.TargetSite", + "shortName": "get_target_site" }, - "description": "Sample for GetSchema", - "file": "discoveryengine_v1beta_generated_schema_service_get_schema_sync.py", + "description": "Sample for GetTargetSite", + "file": "discoveryengine_v1beta_generated_site_search_engine_service_get_target_site_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_SchemaService_GetSchema_sync", + "regionTag": "discoveryengine_v1beta_generated_SiteSearchEngineService_GetTargetSite_sync", "segments": [ { "end": 51, @@ -2911,29 +7250,29 @@ "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_schema_service_get_schema_sync.py" + "title": "discoveryengine_v1beta_generated_site_search_engine_service_get_target_site_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceAsyncClient", - "shortName": "SchemaServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" }, - "fullName": 
"google.cloud.discoveryengine_v1beta.SchemaServiceAsyncClient.list_schemas", + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient.list_target_sites", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.SchemaService.ListSchemas", + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService.ListTargetSites", "service": { - "fullName": "google.cloud.discoveryengine.v1beta.SchemaService", - "shortName": "SchemaService" + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "ListSchemas" + "shortName": "ListTargetSites" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.ListSchemasRequest" + "type": "google.cloud.discoveryengine_v1beta.types.ListTargetSitesRequest" }, { "name": "parent", @@ -2952,14 +7291,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1beta.services.schema_service.pagers.ListSchemasAsyncPager", - "shortName": "list_schemas" + "resultType": "google.cloud.discoveryengine_v1beta.services.site_search_engine_service.pagers.ListTargetSitesAsyncPager", + "shortName": "list_target_sites" }, - "description": "Sample for ListSchemas", - "file": "discoveryengine_v1beta_generated_schema_service_list_schemas_async.py", + "description": "Sample for ListTargetSites", + "file": "discoveryengine_v1beta_generated_site_search_engine_service_list_target_sites_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_SchemaService_ListSchemas_async", + "regionTag": "discoveryengine_v1beta_generated_SiteSearchEngineService_ListTargetSites_async", "segments": [ { "end": 52, @@ -2992,28 +7331,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_schema_service_list_schemas_async.py" + "title": 
"discoveryengine_v1beta_generated_site_search_engine_service_list_target_sites_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceClient", - "shortName": "SchemaServiceClient" + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceClient.list_schemas", + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceClient.list_target_sites", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.SchemaService.ListSchemas", + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService.ListTargetSites", "service": { - "fullName": "google.cloud.discoveryengine.v1beta.SchemaService", - "shortName": "SchemaService" + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "ListSchemas" + "shortName": "ListTargetSites" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.ListSchemasRequest" + "type": "google.cloud.discoveryengine_v1beta.types.ListTargetSitesRequest" }, { "name": "parent", @@ -3032,14 +7371,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1beta.services.schema_service.pagers.ListSchemasPager", - "shortName": "list_schemas" + "resultType": "google.cloud.discoveryengine_v1beta.services.site_search_engine_service.pagers.ListTargetSitesPager", + "shortName": "list_target_sites" }, - "description": "Sample for ListSchemas", - "file": "discoveryengine_v1beta_generated_schema_service_list_schemas_sync.py", + "description": "Sample for ListTargetSites", + "file": "discoveryengine_v1beta_generated_site_search_engine_service_list_target_sites_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"discoveryengine_v1beta_generated_SchemaService_ListSchemas_sync", + "regionTag": "discoveryengine_v1beta_generated_SiteSearchEngineService_ListTargetSites_sync", "segments": [ { "end": 52, @@ -3072,29 +7411,29 @@ "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_schema_service_list_schemas_sync.py" + "title": "discoveryengine_v1beta_generated_site_search_engine_service_list_target_sites_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceAsyncClient", - "shortName": "SchemaServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceAsyncClient.update_schema", + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient.recrawl_uris", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.SchemaService.UpdateSchema", + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService.RecrawlUris", "service": { - "fullName": "google.cloud.discoveryengine.v1beta.SchemaService", - "shortName": "SchemaService" + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "UpdateSchema" + "shortName": "RecrawlUris" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.UpdateSchemaRequest" + "type": "google.cloud.discoveryengine_v1beta.types.RecrawlUrisRequest" }, { "name": "retry", @@ -3110,21 +7449,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_schema" + "shortName": "recrawl_uris" }, - "description": "Sample for UpdateSchema", - "file": "discoveryengine_v1beta_generated_schema_service_update_schema_async.py", + "description": "Sample for RecrawlUris", + "file": 
"discoveryengine_v1beta_generated_site_search_engine_service_recrawl_uris_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_SchemaService_UpdateSchema_async", + "regionTag": "discoveryengine_v1beta_generated_SiteSearchEngineService_RecrawlUris_async", "segments": [ { - "end": 54, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 56, "start": 27, "type": "SHORT" }, @@ -3134,43 +7473,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 45, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_schema_service_update_schema_async.py" + "title": "discoveryengine_v1beta_generated_site_search_engine_service_recrawl_uris_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceClient", - "shortName": "SchemaServiceClient" + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.SchemaServiceClient.update_schema", + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceClient.recrawl_uris", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.SchemaService.UpdateSchema", + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService.RecrawlUris", "service": { - "fullName": "google.cloud.discoveryengine.v1beta.SchemaService", - "shortName": "SchemaService" + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "UpdateSchema" + "shortName": "RecrawlUris" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.discoveryengine_v1beta.types.UpdateSchemaRequest" + "type": "google.cloud.discoveryengine_v1beta.types.RecrawlUrisRequest" }, { "name": "retry", @@ -3186,21 +7525,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "update_schema" + "shortName": "recrawl_uris" }, - "description": "Sample for UpdateSchema", - "file": "discoveryengine_v1beta_generated_schema_service_update_schema_sync.py", + "description": "Sample for RecrawlUris", + "file": "discoveryengine_v1beta_generated_site_search_engine_service_recrawl_uris_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_SchemaService_UpdateSchema_sync", + "regionTag": "discoveryengine_v1beta_generated_SiteSearchEngineService_RecrawlUris_sync", "segments": [ { - "end": 54, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 56, "start": 27, "type": "SHORT" }, @@ -3210,44 +7549,48 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 45, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_schema_service_update_schema_sync.py" + "title": "discoveryengine_v1beta_generated_site_search_engine_service_recrawl_uris_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1beta.SearchServiceAsyncClient", - "shortName": "SearchServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.SearchServiceAsyncClient.search", + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceAsyncClient.update_target_site", "method": { - "fullName": 
"google.cloud.discoveryengine.v1beta.SearchService.Search", + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService.UpdateTargetSite", "service": { - "fullName": "google.cloud.discoveryengine.v1beta.SearchService", - "shortName": "SearchService" + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "Search" + "shortName": "UpdateTargetSite" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.SearchRequest" + "type": "google.cloud.discoveryengine_v1beta.types.UpdateTargetSiteRequest" + }, + { + "name": "target_site", + "type": "google.cloud.discoveryengine_v1beta.types.TargetSite" }, { "name": "retry", @@ -3262,22 +7605,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1beta.services.search_service.pagers.SearchAsyncPager", - "shortName": "search" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_target_site" }, - "description": "Sample for Search", - "file": "discoveryengine_v1beta_generated_search_service_search_async.py", + "description": "Sample for UpdateTargetSite", + "file": "discoveryengine_v1beta_generated_site_search_engine_service_update_target_site_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_SearchService_Search_async", + "regionTag": "discoveryengine_v1beta_generated_SiteSearchEngineService_UpdateTargetSite_async", "segments": [ { - "end": 52, + "end": 58, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 58, "start": 27, "type": "SHORT" }, @@ -3287,43 +7630,47 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 55, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 59, + "start": 56, "type": "RESPONSE_HANDLING" } ], - "title": 
"discoveryengine_v1beta_generated_search_service_search_async.py" + "title": "discoveryengine_v1beta_generated_site_search_engine_service_update_target_site_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1beta.SearchServiceClient", - "shortName": "SearchServiceClient" + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1beta.SearchServiceClient.search", + "fullName": "google.cloud.discoveryengine_v1beta.SiteSearchEngineServiceClient.update_target_site", "method": { - "fullName": "google.cloud.discoveryengine.v1beta.SearchService.Search", + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService.UpdateTargetSite", "service": { - "fullName": "google.cloud.discoveryengine.v1beta.SearchService", - "shortName": "SearchService" + "fullName": "google.cloud.discoveryengine.v1beta.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" }, - "shortName": "Search" + "shortName": "UpdateTargetSite" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1beta.types.SearchRequest" + "type": "google.cloud.discoveryengine_v1beta.types.UpdateTargetSiteRequest" + }, + { + "name": "target_site", + "type": "google.cloud.discoveryengine_v1beta.types.TargetSite" }, { "name": "retry", @@ -3338,22 +7685,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.discoveryengine_v1beta.services.search_service.pagers.SearchPager", - "shortName": "search" + "resultType": "google.api_core.operation.Operation", + "shortName": "update_target_site" }, - "description": "Sample for Search", - "file": "discoveryengine_v1beta_generated_search_service_search_sync.py", + "description": "Sample for UpdateTargetSite", + "file": "discoveryengine_v1beta_generated_site_search_engine_service_update_target_site_sync.py", "language": "PYTHON", "origin": 
"API_DEFINITION", - "regionTag": "discoveryengine_v1beta_generated_SearchService_Search_sync", + "regionTag": "discoveryengine_v1beta_generated_SiteSearchEngineService_UpdateTargetSite_sync", "segments": [ { - "end": 52, + "end": 58, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 58, "start": 27, "type": "SHORT" }, @@ -3363,22 +7710,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 55, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 59, + "start": 56, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1beta_generated_search_service_search_sync.py" + "title": "discoveryengine_v1beta_generated_site_search_engine_service_update_target_site_sync.py" }, { "canonical": true, diff --git a/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1_keywords.py b/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1_keywords.py index 671ba001cfa9..80676102f89a 100644 --- a/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1_keywords.py +++ b/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1_keywords.py @@ -39,28 +39,52 @@ def partition( class discoveryengineCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'batch_create_target_sites': ('parent', 'requests', ), + 'batch_verify_target_sites': ('parent', ), 'collect_user_event': ('parent', 'user_event', 'uri', 'ets', ), 'complete_query': ('data_store', 'query', 'query_model', 'user_pseudo_id', 'include_tail_suggestions', ), - 'converse_conversation': ('name', 'query', 'serving_config', 'conversation', 'safe_search', 'user_labels', 'summary_spec', ), + 'converse_conversation': ('name', 'query', 'serving_config', 'conversation', 'safe_search', 'user_labels', 'summary_spec', 'filter', ), 'create_conversation': 
('parent', 'conversation', ), + 'create_data_store': ('parent', 'data_store', 'data_store_id', 'create_advanced_site_search', ), 'create_document': ('parent', 'document', 'document_id', ), + 'create_engine': ('parent', 'engine', 'engine_id', ), 'create_schema': ('parent', 'schema', 'schema_id', ), + 'create_target_site': ('parent', 'target_site', ), 'delete_conversation': ('name', ), + 'delete_data_store': ('name', ), 'delete_document': ('name', ), + 'delete_engine': ('name', ), 'delete_schema': ('name', ), + 'delete_target_site': ('name', ), + 'disable_advanced_site_search': ('site_search_engine', ), + 'enable_advanced_site_search': ('site_search_engine', ), + 'fetch_domain_verification_status': ('site_search_engine', 'page_size', 'page_token', ), 'get_conversation': ('name', ), + 'get_data_store': ('name', ), 'get_document': ('name', ), + 'get_engine': ('name', ), 'get_schema': ('name', ), + 'get_site_search_engine': ('name', ), + 'get_target_site': ('name', ), 'import_documents': ('parent', 'inline_source', 'gcs_source', 'bigquery_source', 'error_config', 'reconciliation_mode', 'auto_generate_ids', 'id_field', ), + 'import_suggestion_deny_list_entries': ('parent', 'inline_source', 'gcs_source', ), 'import_user_events': ('parent', 'inline_source', 'gcs_source', 'bigquery_source', 'error_config', ), 'list_conversations': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_data_stores': ('parent', 'page_size', 'page_token', 'filter', ), 'list_documents': ('parent', 'page_size', 'page_token', ), + 'list_engines': ('parent', 'page_size', 'page_token', 'filter', ), 'list_schemas': ('parent', 'page_size', 'page_token', ), + 'list_target_sites': ('parent', 'page_size', 'page_token', ), 'purge_documents': ('parent', 'filter', 'force', ), - 'search': ('serving_config', 'branch', 'query', 'image_query', 'page_size', 'page_token', 'offset', 'filter', 'order_by', 'user_info', 'facet_specs', 'boost_spec', 'params', 'query_expansion_spec', 
'spell_correction_spec', 'user_pseudo_id', 'content_search_spec', 'safe_search', 'user_labels', ), + 'purge_suggestion_deny_list_entries': ('parent', ), + 'recrawl_uris': ('site_search_engine', 'uris', ), + 'search': ('serving_config', 'branch', 'query', 'image_query', 'page_size', 'page_token', 'offset', 'filter', 'canonical_filter', 'order_by', 'user_info', 'facet_specs', 'boost_spec', 'params', 'query_expansion_spec', 'spell_correction_spec', 'user_pseudo_id', 'content_search_spec', 'safe_search', 'user_labels', ), 'update_conversation': ('conversation', 'update_mask', ), + 'update_data_store': ('data_store', 'update_mask', ), 'update_document': ('document', 'allow_missing', ), + 'update_engine': ('engine', 'update_mask', ), 'update_schema': ('schema', 'allow_missing', ), + 'update_target_site': ('target_site', ), 'write_user_event': ('parent', 'user_event', ), } diff --git a/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1beta_keywords.py b/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1beta_keywords.py index 713ab2895836..b514680a1656 100644 --- a/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1beta_keywords.py +++ b/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1beta_keywords.py @@ -39,29 +39,56 @@ def partition( class discoveryengineCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'batch_create_target_sites': ('parent', 'requests', ), + 'batch_verify_target_sites': ('parent', ), 'collect_user_event': ('parent', 'user_event', 'uri', 'ets', ), 'complete_query': ('data_store', 'query', 'query_model', 'user_pseudo_id', 'include_tail_suggestions', ), - 'converse_conversation': ('name', 'query', 'serving_config', 'conversation', 'safe_search', 'user_labels', 'summary_spec', ), + 'converse_conversation': ('name', 'query', 'serving_config', 'conversation', 'safe_search', 'user_labels', 
'summary_spec', 'filter', ), 'create_conversation': ('parent', 'conversation', ), + 'create_data_store': ('parent', 'data_store', 'data_store_id', 'create_advanced_site_search', ), 'create_document': ('parent', 'document', 'document_id', ), + 'create_engine': ('parent', 'engine', 'engine_id', ), 'create_schema': ('parent', 'schema', 'schema_id', ), + 'create_target_site': ('parent', 'target_site', ), 'delete_conversation': ('name', ), + 'delete_data_store': ('name', ), 'delete_document': ('name', ), + 'delete_engine': ('name', ), 'delete_schema': ('name', ), + 'delete_target_site': ('name', ), + 'disable_advanced_site_search': ('site_search_engine', ), + 'enable_advanced_site_search': ('site_search_engine', ), + 'fetch_domain_verification_status': ('site_search_engine', 'page_size', 'page_token', ), 'get_conversation': ('name', ), + 'get_data_store': ('name', ), 'get_document': ('name', ), + 'get_engine': ('name', ), 'get_schema': ('name', ), + 'get_serving_config': ('name', ), + 'get_site_search_engine': ('name', ), + 'get_target_site': ('name', ), 'import_documents': ('parent', 'inline_source', 'gcs_source', 'bigquery_source', 'error_config', 'reconciliation_mode', 'auto_generate_ids', 'id_field', ), + 'import_suggestion_deny_list_entries': ('parent', 'inline_source', 'gcs_source', ), 'import_user_events': ('parent', 'inline_source', 'gcs_source', 'bigquery_source', 'error_config', ), 'list_conversations': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_data_stores': ('parent', 'page_size', 'page_token', 'filter', ), 'list_documents': ('parent', 'page_size', 'page_token', ), + 'list_engines': ('parent', 'page_size', 'page_token', 'filter', ), 'list_schemas': ('parent', 'page_size', 'page_token', ), + 'list_serving_configs': ('parent', 'page_size', 'page_token', ), + 'list_target_sites': ('parent', 'page_size', 'page_token', ), 'purge_documents': ('parent', 'filter', 'force', ), + 'purge_suggestion_deny_list_entries': ('parent', ), 
'recommend': ('serving_config', 'user_event', 'page_size', 'filter', 'validate_only', 'params', 'user_labels', ), - 'search': ('serving_config', 'branch', 'query', 'image_query', 'page_size', 'page_token', 'offset', 'filter', 'order_by', 'user_info', 'facet_specs', 'boost_spec', 'params', 'query_expansion_spec', 'spell_correction_spec', 'user_pseudo_id', 'content_search_spec', 'embedding_spec', 'ranking_expression', 'safe_search', 'user_labels', ), + 'recrawl_uris': ('site_search_engine', 'uris', ), + 'search': ('serving_config', 'branch', 'query', 'image_query', 'page_size', 'page_token', 'offset', 'filter', 'canonical_filter', 'order_by', 'user_info', 'facet_specs', 'boost_spec', 'params', 'query_expansion_spec', 'spell_correction_spec', 'user_pseudo_id', 'content_search_spec', 'embedding_spec', 'ranking_expression', 'safe_search', 'user_labels', ), 'update_conversation': ('conversation', 'update_mask', ), + 'update_data_store': ('data_store', 'update_mask', ), 'update_document': ('document', 'allow_missing', ), + 'update_engine': ('engine', 'update_mask', ), 'update_schema': ('schema', 'allow_missing', ), + 'update_serving_config': ('serving_config', 'update_mask', ), + 'update_target_site': ('target_site', ), 'write_user_event': ('parent', 'user_event', ), } diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_completion_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_completion_service.py index fc247b80f3d6..fe96fd75c606 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_completion_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_completion_service.py @@ -26,9 +26,18 @@ import json import math -from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + 
operations_v1, + path_template, +) from google.api_core import api_core_version, client_options from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError @@ -49,7 +58,12 @@ CompletionServiceClient, transports, ) -from google.cloud.discoveryengine_v1.types import completion_service +from google.cloud.discoveryengine_v1.types import ( + completion, + completion_service, + import_config, + purge_config, +) def client_cert_source_callback(): @@ -1306,6 +1320,316 @@ async def test_complete_query_field_headers_async(): ) in kw["metadata"] +@pytest.mark.parametrize( + "request_type", + [ + import_config.ImportSuggestionDenyListEntriesRequest, + dict, + ], +) +def test_import_suggestion_deny_list_entries(request_type, transport: str = "grpc"): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_suggestion_deny_list_entries), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.import_suggestion_deny_list_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == import_config.ImportSuggestionDenyListEntriesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_import_suggestion_deny_list_entries_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_suggestion_deny_list_entries), "__call__" + ) as call: + client.import_suggestion_deny_list_entries() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == import_config.ImportSuggestionDenyListEntriesRequest() + + +@pytest.mark.asyncio +async def test_import_suggestion_deny_list_entries_async( + transport: str = "grpc_asyncio", + request_type=import_config.ImportSuggestionDenyListEntriesRequest, +): + client = CompletionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_suggestion_deny_list_entries), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.import_suggestion_deny_list_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == import_config.ImportSuggestionDenyListEntriesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_import_suggestion_deny_list_entries_async_from_dict(): + await test_import_suggestion_deny_list_entries_async(request_type=dict) + + +def test_import_suggestion_deny_list_entries_field_headers(): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = import_config.ImportSuggestionDenyListEntriesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_suggestion_deny_list_entries), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.import_suggestion_deny_list_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_import_suggestion_deny_list_entries_field_headers_async(): + client = CompletionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = import_config.ImportSuggestionDenyListEntriesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.import_suggestion_deny_list_entries), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.import_suggestion_deny_list_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + purge_config.PurgeSuggestionDenyListEntriesRequest, + dict, + ], +) +def test_purge_suggestion_deny_list_entries(request_type, transport: str = "grpc"): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_suggestion_deny_list_entries), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.purge_suggestion_deny_list_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == purge_config.PurgeSuggestionDenyListEntriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_purge_suggestion_deny_list_entries_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_suggestion_deny_list_entries), "__call__" + ) as call: + client.purge_suggestion_deny_list_entries() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == purge_config.PurgeSuggestionDenyListEntriesRequest() + + +@pytest.mark.asyncio +async def test_purge_suggestion_deny_list_entries_async( + transport: str = "grpc_asyncio", + request_type=purge_config.PurgeSuggestionDenyListEntriesRequest, +): + client = CompletionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_suggestion_deny_list_entries), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.purge_suggestion_deny_list_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == purge_config.PurgeSuggestionDenyListEntriesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_purge_suggestion_deny_list_entries_async_from_dict(): + await test_purge_suggestion_deny_list_entries_async(request_type=dict) + + +def test_purge_suggestion_deny_list_entries_field_headers(): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = purge_config.PurgeSuggestionDenyListEntriesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_suggestion_deny_list_entries), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.purge_suggestion_deny_list_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_purge_suggestion_deny_list_entries_field_headers_async(): + client = CompletionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = purge_config.PurgeSuggestionDenyListEntriesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.purge_suggestion_deny_list_entries), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.purge_suggestion_deny_list_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + @pytest.mark.parametrize( "request_type", [ @@ -1561,23 +1885,459 @@ def test_complete_query_rest_error(): ) -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.CompletionServiceGrpcTransport( +@pytest.mark.parametrize( + "request_type", + [ + import_config.ImportSuggestionDenyListEntriesRequest, + dict, + ], +) +def test_import_suggestion_deny_list_entries_rest(request_type): + client = CompletionServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = CompletionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - # It is an error to provide a credentials file and a transport instance. - transport = transports.CompletionServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CompletionServiceClient( + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.import_suggestion_deny_list_entries(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_import_suggestion_deny_list_entries_rest_required_fields( + request_type=import_config.ImportSuggestionDenyListEntriesRequest, +): + transport_class = transports.CompletionServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).import_suggestion_deny_list_entries._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).import_suggestion_deny_list_entries._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CompletionServiceClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.import_suggestion_deny_list_entries(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_import_suggestion_deny_list_entries_rest_unset_required_fields(): + transport = transports.CompletionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.import_suggestion_deny_list_entries._get_unset_required_fields({}) + ) + assert set(unset_fields) == (set(()) & set(("parent",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_import_suggestion_deny_list_entries_rest_interceptors(null_interceptor): + transport = transports.CompletionServiceRestTransport( + 
credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CompletionServiceRestInterceptor(), + ) + client = CompletionServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CompletionServiceRestInterceptor, + "post_import_suggestion_deny_list_entries", + ) as post, mock.patch.object( + transports.CompletionServiceRestInterceptor, + "pre_import_suggestion_deny_list_entries", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = import_config.ImportSuggestionDenyListEntriesRequest.pb( + import_config.ImportSuggestionDenyListEntriesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = import_config.ImportSuggestionDenyListEntriesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.import_suggestion_deny_list_entries( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_import_suggestion_deny_list_entries_rest_bad_request( + transport: str = "rest", + request_type=import_config.ImportSuggestionDenyListEntriesRequest, +): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": 
"projects/sample1/locations/sample2/collections/sample3/dataStores/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.import_suggestion_deny_list_entries(request) + + +def test_import_suggestion_deny_list_entries_rest_error(): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + purge_config.PurgeSuggestionDenyListEntriesRequest, + dict, + ], +) +def test_purge_suggestion_deny_list_entries_rest(request_type): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.purge_suggestion_deny_list_entries(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_purge_suggestion_deny_list_entries_rest_required_fields( + request_type=purge_config.PurgeSuggestionDenyListEntriesRequest, +): + transport_class = transports.CompletionServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).purge_suggestion_deny_list_entries._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).purge_suggestion_deny_list_entries._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.purge_suggestion_deny_list_entries(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_purge_suggestion_deny_list_entries_rest_unset_required_fields(): + transport = transports.CompletionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.purge_suggestion_deny_list_entries._get_unset_required_fields({}) + ) + assert set(unset_fields) == (set(()) & set(("parent",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_purge_suggestion_deny_list_entries_rest_interceptors(null_interceptor): + transport = transports.CompletionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CompletionServiceRestInterceptor(), + ) + client = CompletionServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CompletionServiceRestInterceptor, + "post_purge_suggestion_deny_list_entries", + ) as post, mock.patch.object( + 
transports.CompletionServiceRestInterceptor, + "pre_purge_suggestion_deny_list_entries", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = purge_config.PurgeSuggestionDenyListEntriesRequest.pb( + purge_config.PurgeSuggestionDenyListEntriesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = purge_config.PurgeSuggestionDenyListEntriesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.purge_suggestion_deny_list_entries( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_purge_suggestion_deny_list_entries_rest_bad_request( + transport: str = "rest", + request_type=purge_config.PurgeSuggestionDenyListEntriesRequest, +): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.purge_suggestion_deny_list_entries(request) + + +def test_purge_suggestion_deny_list_entries_rest_error(): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.CompletionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.CompletionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CompletionServiceClient( client_options={"credentials_file": "credentials.json"}, transport=transport, ) @@ -1701,6 +2461,8 @@ def test_completion_service_base_transport(): # raise NotImplementedError. 
methods = ( "complete_query", + "import_suggestion_deny_list_entries", + "purge_suggestion_deny_list_entries", "get_operation", "list_operations", ) @@ -1711,6 +2473,11 @@ def test_completion_service_base_transport(): with pytest.raises(NotImplementedError): transport.close() + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + # Catch all for all remaining methods and properties remainder = [ "kind", @@ -1898,6 +2665,23 @@ def test_completion_service_http_transport_client_cert_source_for_mtls(): mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) +def test_completion_service_rest_lro_client(): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + @pytest.mark.parametrize( "transport_name", [ @@ -1964,6 +2748,12 @@ def test_completion_service_client_transport_session_collision(transport_name): session1 = client1.transport.complete_query._session session2 = client2.transport.complete_query._session assert session1 != session2 + session1 = client1.transport.import_suggestion_deny_list_entries._session + session2 = client2.transport.import_suggestion_deny_list_entries._session + assert session1 != session2 + session1 = client1.transport.purge_suggestion_deny_list_entries._session + session2 = client2.transport.purge_suggestion_deny_list_entries._session + assert session1 != session2 def test_completion_service_grpc_transport_channel(): @@ -2092,6 +2882,40 @@ def test_completion_service_transport_channel_mtls_with_adc(transport_class): assert transport.grpc_channel == mock_grpc_channel +def test_completion_service_grpc_lro_client(): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_completion_service_grpc_lro_async_client(): + client = CompletionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + def test_data_store_path(): project = "squid" location = "clam" diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_conversational_search_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_conversational_search_service.py index a55183978c5a..d0e30e8fb844 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_conversational_search_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_conversational_search_service.py @@ -3258,6 +3258,25 @@ def test_create_conversation_rest(request_type): "categories": ["categories_value1", "categories_value2"], "scores": [0.656, 0.657], }, + "summary_with_metadata": { + "summary": "summary_value", + "citation_metadata": { + "citations": [ + { + "start_index": 1189, + "end_index": 942, + "sources": [{"reference_index": 1574}], + } + ] + }, + "references": [ + { + "title": "title_value", + "document": "document_value", + "uri": "uri_value", + } + ], + }, } }, "create_time": {"seconds": 751, "nanos": 543}, @@ -3913,6 +3932,25 @@ def test_update_conversation_rest(request_type): "categories": ["categories_value1", "categories_value2"], "scores": [0.656, 0.657], }, + "summary_with_metadata": { + "summary": "summary_value", + "citation_metadata": { + "citations": [ + { + "start_index": 1189, + "end_index": 942, + "sources": [{"reference_index": 1574}], + } + ] + }, + "references": [ + { + "title": "title_value", + "document": "document_value", + "uri": "uri_value", + } + ], + }, } }, "create_time": {"seconds": 751, "nanos": 543}, diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_data_store_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_data_store_service.py new file mode 100644 index 000000000000..6a916204d7d5 --- /dev/null +++ 
b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_data_store_service.py @@ -0,0 +1,5516 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule 
+import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.discoveryengine_v1.services.data_store_service import ( + DataStoreServiceAsyncClient, + DataStoreServiceClient, + pagers, + transports, +) +from google.cloud.discoveryengine_v1.types import common +from google.cloud.discoveryengine_v1.types import data_store +from google.cloud.discoveryengine_v1.types import data_store as gcd_data_store +from google.cloud.discoveryengine_v1.types import data_store_service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DataStoreServiceClient._get_default_mtls_endpoint(None) is None + assert ( + DataStoreServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + DataStoreServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + DataStoreServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DataStoreServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DataStoreServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert DataStoreServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert DataStoreServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert DataStoreServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + DataStoreServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": 
"never"}): + assert DataStoreServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert DataStoreServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert DataStoreServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + DataStoreServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert DataStoreServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert DataStoreServiceClient._get_client_cert_source(None, False) is None + assert ( + DataStoreServiceClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + DataStoreServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + DataStoreServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + DataStoreServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + DataStoreServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + 
modify_default_endpoint_template(DataStoreServiceClient), +) +@mock.patch.object( + DataStoreServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DataStoreServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = DataStoreServiceClient._DEFAULT_UNIVERSE + default_endpoint = DataStoreServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = DataStoreServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + DataStoreServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + DataStoreServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == DataStoreServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + DataStoreServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + DataStoreServiceClient._get_api_endpoint(None, None, default_universe, "always") + == DataStoreServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + DataStoreServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == DataStoreServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + DataStoreServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + DataStoreServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + DataStoreServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." 
+ ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + DataStoreServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + DataStoreServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + DataStoreServiceClient._get_universe_domain(None, None) + == DataStoreServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + DataStoreServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DataStoreServiceClient, transports.DataStoreServiceGrpcTransport, "grpc"), + (DataStoreServiceClient, transports.DataStoreServiceRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. 
+ channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. 
+ api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (DataStoreServiceClient, "grpc"), + (DataStoreServiceAsyncClient, "grpc_asyncio"), + (DataStoreServiceClient, "rest"), + ], +) +def test_data_store_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.DataStoreServiceGrpcTransport, "grpc"), + (transports.DataStoreServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.DataStoreServiceRestTransport, "rest"), + ], +) +def 
test_data_store_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (DataStoreServiceClient, "grpc"), + (DataStoreServiceAsyncClient, "grpc_asyncio"), + (DataStoreServiceClient, "rest"), + ], +) +def test_data_store_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +def test_data_store_service_client_get_transport_class(): + transport = DataStoreServiceClient.get_transport_class() + available_transports = [ + transports.DataStoreServiceGrpcTransport, + transports.DataStoreServiceRestTransport, + ] + assert transport in 
available_transports + + transport = DataStoreServiceClient.get_transport_class("grpc") + assert transport == transports.DataStoreServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DataStoreServiceClient, transports.DataStoreServiceGrpcTransport, "grpc"), + ( + DataStoreServiceAsyncClient, + transports.DataStoreServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (DataStoreServiceClient, transports.DataStoreServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + DataStoreServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DataStoreServiceClient), +) +@mock.patch.object( + DataStoreServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DataStoreServiceAsyncClient), +) +def test_data_store_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(DataStoreServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DataStoreServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + 
always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + DataStoreServiceClient, + transports.DataStoreServiceGrpcTransport, + "grpc", + "true", + ), + ( + DataStoreServiceAsyncClient, + transports.DataStoreServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + DataStoreServiceClient, + transports.DataStoreServiceGrpcTransport, + "grpc", + "false", + ), + ( + DataStoreServiceAsyncClient, + transports.DataStoreServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + DataStoreServiceClient, + transports.DataStoreServiceRestTransport, + "rest", + "true", + ), + ( + DataStoreServiceClient, + transports.DataStoreServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + DataStoreServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DataStoreServiceClient), +) +@mock.patch.object( + DataStoreServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DataStoreServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_data_store_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [DataStoreServiceClient, DataStoreServiceAsyncClient] +) +@mock.patch.object( + DataStoreServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DataStoreServiceClient), +) +@mock.patch.object( + DataStoreServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DataStoreServiceAsyncClient), +) +def test_data_store_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [DataStoreServiceClient, DataStoreServiceAsyncClient] +) +@mock.patch.object( + DataStoreServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DataStoreServiceClient), +) +@mock.patch.object( + DataStoreServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DataStoreServiceAsyncClient), +) +def test_data_store_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = DataStoreServiceClient._DEFAULT_UNIVERSE + default_endpoint = DataStoreServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = DataStoreServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DataStoreServiceClient, transports.DataStoreServiceGrpcTransport, "grpc"), + ( + DataStoreServiceAsyncClient, + transports.DataStoreServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (DataStoreServiceClient, transports.DataStoreServiceRestTransport, "rest"), + ], +) +def test_data_store_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + DataStoreServiceClient, + transports.DataStoreServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + DataStoreServiceAsyncClient, + transports.DataStoreServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + DataStoreServiceClient, + transports.DataStoreServiceRestTransport, + "rest", + None, + ), + ], +) +def test_data_store_service_client_client_options_credentials_file( + client_class, 
transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_data_store_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.discoveryengine_v1.services.data_store_service.transports.DataStoreServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = DataStoreServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + DataStoreServiceClient, + transports.DataStoreServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + DataStoreServiceAsyncClient, + transports.DataStoreServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_data_store_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + data_store_service.CreateDataStoreRequest, + dict, + ], +) +def test_create_data_store(request_type, transport: str = "grpc"): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + 
# and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_store), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_data_store(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == data_store_service.CreateDataStoreRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_data_store_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_store), "__call__" + ) as call: + client.create_data_store() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_store_service.CreateDataStoreRequest() + + +@pytest.mark.asyncio +async def test_create_data_store_async( + transport: str = "grpc_asyncio", + request_type=data_store_service.CreateDataStoreRequest, +): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_data_store), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_data_store(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == data_store_service.CreateDataStoreRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_data_store_async_from_dict(): + await test_create_data_store_async(request_type=dict) + + +def test_create_data_store_field_headers(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_store_service.CreateDataStoreRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_store), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_data_store(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_data_store_field_headers_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = data_store_service.CreateDataStoreRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_store), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_data_store(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_data_store_flattened(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_store), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_data_store( + parent="parent_value", + data_store=gcd_data_store.DataStore(name="name_value"), + data_store_id="data_store_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].data_store + mock_val = gcd_data_store.DataStore(name="name_value") + assert arg == mock_val + arg = args[0].data_store_id + mock_val = "data_store_id_value" + assert arg == mock_val + + +def test_create_data_store_flattened_error(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_data_store( + data_store_service.CreateDataStoreRequest(), + parent="parent_value", + data_store=gcd_data_store.DataStore(name="name_value"), + data_store_id="data_store_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_data_store_flattened_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_store), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_data_store( + parent="parent_value", + data_store=gcd_data_store.DataStore(name="name_value"), + data_store_id="data_store_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].data_store + mock_val = gcd_data_store.DataStore(name="name_value") + assert arg == mock_val + arg = args[0].data_store_id + mock_val = "data_store_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_data_store_flattened_error_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_data_store( + data_store_service.CreateDataStoreRequest(), + parent="parent_value", + data_store=gcd_data_store.DataStore(name="name_value"), + data_store_id="data_store_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + data_store_service.GetDataStoreRequest, + dict, + ], +) +def test_get_data_store(request_type, transport: str = "grpc"): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_store), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = data_store.DataStore( + name="name_value", + display_name="display_name_value", + industry_vertical=common.IndustryVertical.GENERIC, + solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], + default_schema_id="default_schema_id_value", + content_config=data_store.DataStore.ContentConfig.NO_CONTENT, + ) + response = client.get_data_store(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == data_store_service.GetDataStoreRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, data_store.DataStore) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.industry_vertical == common.IndustryVertical.GENERIC + assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] + assert response.default_schema_id == "default_schema_id_value" + assert response.content_config == data_store.DataStore.ContentConfig.NO_CONTENT + + +def test_get_data_store_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_store), "__call__") as call: + client.get_data_store() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_store_service.GetDataStoreRequest() + + +@pytest.mark.asyncio +async def test_get_data_store_async( + transport: str = "grpc_asyncio", request_type=data_store_service.GetDataStoreRequest +): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_store), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_store.DataStore( + name="name_value", + display_name="display_name_value", + industry_vertical=common.IndustryVertical.GENERIC, + solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], + default_schema_id="default_schema_id_value", + content_config=data_store.DataStore.ContentConfig.NO_CONTENT, + ) + ) + response = await client.get_data_store(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == data_store_service.GetDataStoreRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, data_store.DataStore) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.industry_vertical == common.IndustryVertical.GENERIC + assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] + assert response.default_schema_id == "default_schema_id_value" + assert response.content_config == data_store.DataStore.ContentConfig.NO_CONTENT + + +@pytest.mark.asyncio +async def test_get_data_store_async_from_dict(): + await test_get_data_store_async(request_type=dict) + + +def test_get_data_store_field_headers(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_store_service.GetDataStoreRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_store), "__call__") as call: + call.return_value = data_store.DataStore() + client.get_data_store(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_data_store_field_headers_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_store_service.GetDataStoreRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_store), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_store.DataStore() + ) + await client.get_data_store(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_data_store_flattened(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_store), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = data_store.DataStore() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_data_store( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_data_store_flattened_error(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_data_store( + data_store_service.GetDataStoreRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_data_store_flattened_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_store), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = data_store.DataStore() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_store.DataStore() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_data_store( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_data_store_flattened_error_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_data_store( + data_store_service.GetDataStoreRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + data_store_service.ListDataStoresRequest, + dict, + ], +) +def test_list_data_stores(request_type, transport: str = "grpc"): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_data_stores), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = data_store_service.ListDataStoresResponse( + next_page_token="next_page_token_value", + ) + response = client.list_data_stores(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == data_store_service.ListDataStoresRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDataStoresPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_data_stores_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_data_stores), "__call__") as call: + client.list_data_stores() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_store_service.ListDataStoresRequest() + + +@pytest.mark.asyncio +async def test_list_data_stores_async( + transport: str = "grpc_asyncio", + request_type=data_store_service.ListDataStoresRequest, +): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_data_stores), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_store_service.ListDataStoresResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_data_stores(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == data_store_service.ListDataStoresRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDataStoresAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_data_stores_async_from_dict(): + await test_list_data_stores_async(request_type=dict) + + +def test_list_data_stores_field_headers(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = data_store_service.ListDataStoresRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_data_stores), "__call__") as call: + call.return_value = data_store_service.ListDataStoresResponse() + client.list_data_stores(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_data_stores_field_headers_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_store_service.ListDataStoresRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_data_stores), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_store_service.ListDataStoresResponse() + ) + await client.list_data_stores(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_data_stores_flattened(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_data_stores), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = data_store_service.ListDataStoresResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_data_stores( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_data_stores_flattened_error(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_data_stores( + data_store_service.ListDataStoresRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_data_stores_flattened_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_data_stores), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = data_store_service.ListDataStoresResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_store_service.ListDataStoresResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_data_stores( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_data_stores_flattened_error_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_data_stores( + data_store_service.ListDataStoresRequest(), + parent="parent_value", + ) + + +def test_list_data_stores_pager(transport_name: str = "grpc"): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_data_stores), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + data_store_service.ListDataStoresResponse( + data_stores=[ + data_store.DataStore(), + data_store.DataStore(), + data_store.DataStore(), + ], + next_page_token="abc", + ), + data_store_service.ListDataStoresResponse( + data_stores=[], + next_page_token="def", + ), + data_store_service.ListDataStoresResponse( + data_stores=[ + data_store.DataStore(), + ], + next_page_token="ghi", + ), + data_store_service.ListDataStoresResponse( + data_stores=[ + data_store.DataStore(), + data_store.DataStore(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_data_stores(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, data_store.DataStore) for i in results) + + +def test_list_data_stores_pages(transport_name: str = "grpc"): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_data_stores), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + data_store_service.ListDataStoresResponse( + data_stores=[ + data_store.DataStore(), + data_store.DataStore(), + data_store.DataStore(), + ], + next_page_token="abc", + ), + data_store_service.ListDataStoresResponse( + data_stores=[], + next_page_token="def", + ), + data_store_service.ListDataStoresResponse( + data_stores=[ + data_store.DataStore(), + ], + next_page_token="ghi", + ), + data_store_service.ListDataStoresResponse( + data_stores=[ + data_store.DataStore(), + data_store.DataStore(), + ], + ), + RuntimeError, + ) + pages = list(client.list_data_stores(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_data_stores_async_pager(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_stores), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + data_store_service.ListDataStoresResponse( + data_stores=[ + data_store.DataStore(), + data_store.DataStore(), + data_store.DataStore(), + ], + next_page_token="abc", + ), + data_store_service.ListDataStoresResponse( + data_stores=[], + next_page_token="def", + ), + data_store_service.ListDataStoresResponse( + data_stores=[ + data_store.DataStore(), + ], + next_page_token="ghi", + ), + data_store_service.ListDataStoresResponse( + data_stores=[ + data_store.DataStore(), + data_store.DataStore(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_data_stores( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, data_store.DataStore) for i in responses) + + +@pytest.mark.asyncio +async def test_list_data_stores_async_pages(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_stores), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + data_store_service.ListDataStoresResponse( + data_stores=[ + data_store.DataStore(), + data_store.DataStore(), + data_store.DataStore(), + ], + next_page_token="abc", + ), + data_store_service.ListDataStoresResponse( + data_stores=[], + next_page_token="def", + ), + data_store_service.ListDataStoresResponse( + data_stores=[ + data_store.DataStore(), + ], + next_page_token="ghi", + ), + data_store_service.ListDataStoresResponse( + data_stores=[ + data_store.DataStore(), + data_store.DataStore(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_data_stores(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + data_store_service.DeleteDataStoreRequest, + dict, + ], +) +def test_delete_data_store(request_type, transport: str = "grpc"): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_store), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_data_store(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == data_store_service.DeleteDataStoreRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_data_store_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_store), "__call__" + ) as call: + client.delete_data_store() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_store_service.DeleteDataStoreRequest() + + +@pytest.mark.asyncio +async def test_delete_data_store_async( + transport: str = "grpc_asyncio", + request_type=data_store_service.DeleteDataStoreRequest, +): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_store), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_data_store(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == data_store_service.DeleteDataStoreRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_data_store_async_from_dict(): + await test_delete_data_store_async(request_type=dict) + + +def test_delete_data_store_field_headers(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_store_service.DeleteDataStoreRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_store), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_data_store(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_data_store_field_headers_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_store_service.DeleteDataStoreRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_store), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_data_store(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_data_store_flattened(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_store), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_data_store( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_data_store_flattened_error(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_data_store( + data_store_service.DeleteDataStoreRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_data_store_flattened_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_store), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_data_store( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_data_store_flattened_error_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_data_store( + data_store_service.DeleteDataStoreRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + data_store_service.UpdateDataStoreRequest, + dict, + ], +) +def test_update_data_store(request_type, transport: str = "grpc"): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_store), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = gcd_data_store.DataStore( + name="name_value", + display_name="display_name_value", + industry_vertical=common.IndustryVertical.GENERIC, + solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], + default_schema_id="default_schema_id_value", + content_config=gcd_data_store.DataStore.ContentConfig.NO_CONTENT, + ) + response = client.update_data_store(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == data_store_service.UpdateDataStoreRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gcd_data_store.DataStore) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.industry_vertical == common.IndustryVertical.GENERIC + assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] + assert response.default_schema_id == "default_schema_id_value" + assert response.content_config == gcd_data_store.DataStore.ContentConfig.NO_CONTENT + + +def test_update_data_store_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_data_store), "__call__" + ) as call: + client.update_data_store() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_store_service.UpdateDataStoreRequest() + + +@pytest.mark.asyncio +async def test_update_data_store_async( + transport: str = "grpc_asyncio", + request_type=data_store_service.UpdateDataStoreRequest, +): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_store), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_data_store.DataStore( + name="name_value", + display_name="display_name_value", + industry_vertical=common.IndustryVertical.GENERIC, + solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], + default_schema_id="default_schema_id_value", + content_config=gcd_data_store.DataStore.ContentConfig.NO_CONTENT, + ) + ) + response = await client.update_data_store(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == data_store_service.UpdateDataStoreRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcd_data_store.DataStore) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.industry_vertical == common.IndustryVertical.GENERIC + assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] + assert response.default_schema_id == "default_schema_id_value" + assert response.content_config == gcd_data_store.DataStore.ContentConfig.NO_CONTENT + + +@pytest.mark.asyncio +async def test_update_data_store_async_from_dict(): + await test_update_data_store_async(request_type=dict) + + +def test_update_data_store_field_headers(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_store_service.UpdateDataStoreRequest() + + request.data_store.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_store), "__call__" + ) as call: + call.return_value = gcd_data_store.DataStore() + client.update_data_store(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_store.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_data_store_field_headers_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = data_store_service.UpdateDataStoreRequest() + + request.data_store.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_store), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_data_store.DataStore() + ) + await client.update_data_store(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_store.name=name_value", + ) in kw["metadata"] + + +def test_update_data_store_flattened(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_store), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_data_store.DataStore() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_data_store( + data_store=gcd_data_store.DataStore(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].data_store + mock_val = gcd_data_store.DataStore(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_data_store_flattened_error(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_data_store( + data_store_service.UpdateDataStoreRequest(), + data_store=gcd_data_store.DataStore(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_data_store_flattened_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_store), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_data_store.DataStore() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_data_store.DataStore() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_data_store( + data_store=gcd_data_store.DataStore(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].data_store + mock_val = gcd_data_store.DataStore(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_data_store_flattened_error_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_data_store( + data_store_service.UpdateDataStoreRequest(), + data_store=gcd_data_store.DataStore(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + data_store_service.CreateDataStoreRequest, + dict, + ], +) +def test_create_data_store_rest(request_type): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["data_store"] = { + "name": "name_value", + "display_name": "display_name_value", + "industry_vertical": 1, + "solution_types": [1], + "default_schema_id": "default_schema_id_value", + "content_config": 1, + "create_time": {"seconds": 751, "nanos": 543}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = data_store_service.CreateDataStoreRequest.meta.fields["data_store"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["data_store"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the 
dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["data_store"][field])): + del request_init["data_store"][field][i][subfield] + else: + del request_init["data_store"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_data_store(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_data_store_rest_required_fields( + request_type=data_store_service.CreateDataStoreRequest, +): + transport_class = transports.DataStoreServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["data_store_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "dataStoreId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_data_store._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "dataStoreId" in jsonified_request + assert jsonified_request["dataStoreId"] == request_init["data_store_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["dataStoreId"] = "data_store_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_data_store._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "create_advanced_site_search", + "data_store_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "dataStoreId" in jsonified_request + assert jsonified_request["dataStoreId"] == "data_store_id_value" + + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_data_store(request) + + expected_params = [ + ( + "dataStoreId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_data_store_rest_unset_required_fields(): + transport = transports.DataStoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_data_store._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "createAdvancedSiteSearch", + "dataStoreId", + ) + ) + & set( + ( + "parent", + "dataStore", + "dataStoreId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_data_store_rest_interceptors(null_interceptor): + transport = transports.DataStoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataStoreServiceRestInterceptor(), + ) + client = DataStoreServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DataStoreServiceRestInterceptor, "post_create_data_store" + ) as post, mock.patch.object( + transports.DataStoreServiceRestInterceptor, "pre_create_data_store" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + 
pb_message = data_store_service.CreateDataStoreRequest.pb( + data_store_service.CreateDataStoreRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = data_store_service.CreateDataStoreRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_data_store( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_data_store_rest_bad_request( + transport: str = "rest", request_type=data_store_service.CreateDataStoreRequest +): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_data_store(request) + + +def test_create_data_store_rest_flattened(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + data_store=gcd_data_store.DataStore(name="name_value"), + data_store_id="data_store_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_data_store(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/dataStores" % client.transport._host, + args[1], + ) + + +def test_create_data_store_rest_flattened_error(transport: str = "rest"): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_data_store( + data_store_service.CreateDataStoreRequest(), + parent="parent_value", + data_store=gcd_data_store.DataStore(name="name_value"), + data_store_id="data_store_id_value", + ) + + +def test_create_data_store_rest_error(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + data_store_service.GetDataStoreRequest, + dict, + ], +) +def test_get_data_store_rest(request_type): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dataStores/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = data_store.DataStore( + name="name_value", + display_name="display_name_value", + industry_vertical=common.IndustryVertical.GENERIC, + solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], + default_schema_id="default_schema_id_value", + content_config=data_store.DataStore.ContentConfig.NO_CONTENT, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = data_store.DataStore.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_data_store(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, data_store.DataStore) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.industry_vertical == common.IndustryVertical.GENERIC + assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] + assert response.default_schema_id == "default_schema_id_value" + assert response.content_config == data_store.DataStore.ContentConfig.NO_CONTENT + + +def test_get_data_store_rest_required_fields( + request_type=data_store_service.GetDataStoreRequest, +): + transport_class = transports.DataStoreServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_data_store._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_data_store._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = data_store.DataStore() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_store.DataStore.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_data_store(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_data_store_rest_unset_required_fields(): + transport = transports.DataStoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_data_store._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_data_store_rest_interceptors(null_interceptor): + transport = transports.DataStoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataStoreServiceRestInterceptor(), + ) + client = DataStoreServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + 
transports.DataStoreServiceRestInterceptor, "post_get_data_store" + ) as post, mock.patch.object( + transports.DataStoreServiceRestInterceptor, "pre_get_data_store" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = data_store_service.GetDataStoreRequest.pb( + data_store_service.GetDataStoreRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = data_store.DataStore.to_json(data_store.DataStore()) + + request = data_store_service.GetDataStoreRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = data_store.DataStore() + + client.get_data_store( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_data_store_rest_bad_request( + transport: str = "rest", request_type=data_store_service.GetDataStoreRequest +): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dataStores/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_data_store(request) + + +def test_get_data_store_rest_flattened(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = data_store.DataStore() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = data_store.DataStore.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_data_store(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/dataStores/*}" % client.transport._host, + args[1], + ) + + +def test_get_data_store_rest_flattened_error(transport: str = "rest"): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_data_store( + data_store_service.GetDataStoreRequest(), + name="name_value", + ) + + +def test_get_data_store_rest_error(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + data_store_service.ListDataStoresRequest, + dict, + ], +) +def test_list_data_stores_rest(request_type): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = data_store_service.ListDataStoresResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = data_store_service.ListDataStoresResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_data_stores(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDataStoresPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_data_stores_rest_required_fields( + request_type=data_store_service.ListDataStoresRequest, +): + transport_class = transports.DataStoreServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_data_stores._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_data_stores._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = data_store_service.ListDataStoresResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_store_service.ListDataStoresResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_data_stores(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_data_stores_rest_unset_required_fields(): + transport = transports.DataStoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_data_stores._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_data_stores_rest_interceptors(null_interceptor): + transport = transports.DataStoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataStoreServiceRestInterceptor(), + ) + client = DataStoreServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, 
mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DataStoreServiceRestInterceptor, "post_list_data_stores" + ) as post, mock.patch.object( + transports.DataStoreServiceRestInterceptor, "pre_list_data_stores" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = data_store_service.ListDataStoresRequest.pb( + data_store_service.ListDataStoresRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = data_store_service.ListDataStoresResponse.to_json( + data_store_service.ListDataStoresResponse() + ) + + request = data_store_service.ListDataStoresRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = data_store_service.ListDataStoresResponse() + + client.list_data_stores( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_data_stores_rest_bad_request( + transport: str = "rest", request_type=data_store_service.ListDataStoresRequest +): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_data_stores(request) + + +def test_list_data_stores_rest_flattened(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = data_store_service.ListDataStoresResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = data_store_service.ListDataStoresResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_data_stores(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/dataStores" % client.transport._host, + args[1], + ) + + +def test_list_data_stores_rest_flattened_error(transport: str = "rest"): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_data_stores( + data_store_service.ListDataStoresRequest(), + parent="parent_value", + ) + + +def test_list_data_stores_rest_pager(transport: str = "rest"): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + data_store_service.ListDataStoresResponse( + data_stores=[ + data_store.DataStore(), + data_store.DataStore(), + data_store.DataStore(), + ], + next_page_token="abc", + ), + data_store_service.ListDataStoresResponse( + data_stores=[], + next_page_token="def", + ), + data_store_service.ListDataStoresResponse( + data_stores=[ + data_store.DataStore(), + ], + next_page_token="ghi", + ), + data_store_service.ListDataStoresResponse( + data_stores=[ + data_store.DataStore(), + data_store.DataStore(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + data_store_service.ListDataStoresResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_data_stores(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, data_store.DataStore) for i in results) + + pages = list(client.list_data_stores(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + data_store_service.DeleteDataStoreRequest, + dict, + ], +) +def test_delete_data_store_rest(request_type): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dataStores/sample3"} + request = request_type(**request_init) + + # Mock the http request 
call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_data_store(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_data_store_rest_required_fields( + request_type=data_store_service.DeleteDataStoreRequest, +): + transport_class = transports.DataStoreServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_data_store._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_data_store._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = 
request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_data_store(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_data_store_rest_unset_required_fields(): + transport = transports.DataStoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_data_store._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_data_store_rest_interceptors(null_interceptor): + transport = transports.DataStoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataStoreServiceRestInterceptor(), + ) + client = DataStoreServiceClient(transport=transport) + with mock.patch.object( + 
type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DataStoreServiceRestInterceptor, "post_delete_data_store" + ) as post, mock.patch.object( + transports.DataStoreServiceRestInterceptor, "pre_delete_data_store" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = data_store_service.DeleteDataStoreRequest.pb( + data_store_service.DeleteDataStoreRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = data_store_service.DeleteDataStoreRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_data_store( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_data_store_rest_bad_request( + transport: str = "rest", request_type=data_store_service.DeleteDataStoreRequest +): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dataStores/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_data_store(request) + + +def test_delete_data_store_rest_flattened(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_data_store(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/dataStores/*}" % client.transport._host, + args[1], + ) + + +def test_delete_data_store_rest_flattened_error(transport: str = "rest"): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_data_store( + data_store_service.DeleteDataStoreRequest(), + name="name_value", + ) + + +def test_delete_data_store_rest_error(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + data_store_service.UpdateDataStoreRequest, + dict, + ], +) +def test_update_data_store_rest(request_type): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "data_store": {"name": "projects/sample1/locations/sample2/dataStores/sample3"} + } + request_init["data_store"] = { + "name": "projects/sample1/locations/sample2/dataStores/sample3", + "display_name": "display_name_value", + "industry_vertical": 1, + "solution_types": [1], + "default_schema_id": "default_schema_id_value", + "content_config": 1, + "create_time": {"seconds": 751, "nanos": 543}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = data_store_service.UpdateDataStoreRequest.meta.fields["data_store"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["data_store"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["data_store"][field])): + del request_init["data_store"][field][i][subfield] + else: + del 
request_init["data_store"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcd_data_store.DataStore( + name="name_value", + display_name="display_name_value", + industry_vertical=common.IndustryVertical.GENERIC, + solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], + default_schema_id="default_schema_id_value", + content_config=gcd_data_store.DataStore.ContentConfig.NO_CONTENT, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcd_data_store.DataStore.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_data_store(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcd_data_store.DataStore) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.industry_vertical == common.IndustryVertical.GENERIC + assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] + assert response.default_schema_id == "default_schema_id_value" + assert response.content_config == gcd_data_store.DataStore.ContentConfig.NO_CONTENT + + +def test_update_data_store_rest_required_fields( + request_type=data_store_service.UpdateDataStoreRequest, +): + transport_class = transports.DataStoreServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_data_store._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_data_store._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcd_data_store.DataStore() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gcd_data_store.DataStore.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_data_store(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_data_store_rest_unset_required_fields(): + transport = transports.DataStoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_data_store._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("dataStore",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_data_store_rest_interceptors(null_interceptor): + transport = transports.DataStoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataStoreServiceRestInterceptor(), + ) + client = DataStoreServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + 
path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DataStoreServiceRestInterceptor, "post_update_data_store" + ) as post, mock.patch.object( + transports.DataStoreServiceRestInterceptor, "pre_update_data_store" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = data_store_service.UpdateDataStoreRequest.pb( + data_store_service.UpdateDataStoreRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gcd_data_store.DataStore.to_json( + gcd_data_store.DataStore() + ) + + request = data_store_service.UpdateDataStoreRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gcd_data_store.DataStore() + + client.update_data_store( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_data_store_rest_bad_request( + transport: str = "rest", request_type=data_store_service.UpdateDataStoreRequest +): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "data_store": {"name": "projects/sample1/locations/sample2/dataStores/sample3"} + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_data_store(request) + + +def test_update_data_store_rest_flattened(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcd_data_store.DataStore() + + # get arguments that satisfy an http rule for this method + sample_request = { + "data_store": { + "name": "projects/sample1/locations/sample2/dataStores/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + data_store=gcd_data_store.DataStore(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcd_data_store.DataStore.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_data_store(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{data_store.name=projects/*/locations/*/dataStores/*}" + % client.transport._host, + args[1], + ) + + +def test_update_data_store_rest_flattened_error(transport: str = "rest"): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_data_store( + data_store_service.UpdateDataStoreRequest(), + data_store=gcd_data_store.DataStore(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_data_store_rest_error(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DataStoreServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DataStoreServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataStoreServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.DataStoreServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataStoreServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataStoreServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DataStoreServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataStoreServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DataStoreServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DataStoreServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DataStoreServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DataStoreServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataStoreServiceGrpcTransport, + transports.DataStoreServiceGrpcAsyncIOTransport, + transports.DataStoreServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = DataStoreServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DataStoreServiceGrpcTransport, + ) + + +def test_data_store_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DataStoreServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_data_store_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.discoveryengine_v1.services.data_store_service.transports.DataStoreServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.DataStoreServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "create_data_store", + "get_data_store", + "list_data_stores", + "delete_data_store", + "update_data_store", + "get_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_data_store_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.discoveryengine_v1.services.data_store_service.transports.DataStoreServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DataStoreServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_data_store_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.discoveryengine_v1.services.data_store_service.transports.DataStoreServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DataStoreServiceTransport() + adc.assert_called_once() + + +def test_data_store_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DataStoreServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataStoreServiceGrpcTransport, + transports.DataStoreServiceGrpcAsyncIOTransport, + ], +) +def test_data_store_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataStoreServiceGrpcTransport, + transports.DataStoreServiceGrpcAsyncIOTransport, + transports.DataStoreServiceRestTransport, + ], +) +def test_data_store_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DataStoreServiceGrpcTransport, grpc_helpers), + (transports.DataStoreServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_data_store_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataStoreServiceGrpcTransport, + transports.DataStoreServiceGrpcAsyncIOTransport, + ], +) +def test_data_store_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_data_store_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.DataStoreServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_data_store_service_rest_lro_client(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_data_store_service_host_no_port(transport_name): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="discoveryengine.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_data_store_service_host_with_port(transport_name): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="discoveryengine.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "discoveryengine.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_data_store_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = DataStoreServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = DataStoreServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_data_store._session + session2 = client2.transport.create_data_store._session + assert session1 != session2 + session1 = client1.transport.get_data_store._session + session2 = client2.transport.get_data_store._session + assert session1 != session2 + session1 = client1.transport.list_data_stores._session + session2 = 
client2.transport.list_data_stores._session + assert session1 != session2 + session1 = client1.transport.delete_data_store._session + session2 = client2.transport.delete_data_store._session + assert session1 != session2 + session1 = client1.transport.update_data_store._session + session2 = client2.transport.update_data_store._session + assert session1 != session2 + + +def test_data_store_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.DataStoreServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_data_store_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.DataStoreServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.DataStoreServiceGrpcTransport, + transports.DataStoreServiceGrpcAsyncIOTransport, + ], +) +def test_data_store_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.DataStoreServiceGrpcTransport, + transports.DataStoreServiceGrpcAsyncIOTransport, + ], +) +def test_data_store_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_data_store_service_grpc_lro_client(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_data_store_service_grpc_lro_async_client(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. 
+ assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_collection_path(): + project = "squid" + location = "clam" + collection = "whelk" + expected = ( + "projects/{project}/locations/{location}/collections/{collection}".format( + project=project, + location=location, + collection=collection, + ) + ) + actual = DataStoreServiceClient.collection_path(project, location, collection) + assert expected == actual + + +def test_parse_collection_path(): + expected = { + "project": "octopus", + "location": "oyster", + "collection": "nudibranch", + } + path = DataStoreServiceClient.collection_path(**expected) + + # Check that the path construction is reversible. + actual = DataStoreServiceClient.parse_collection_path(path) + assert expected == actual + + +def test_data_store_path(): + project = "cuttlefish" + location = "mussel" + data_store = "winkle" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}".format( + project=project, + location=location, + data_store=data_store, + ) + actual = DataStoreServiceClient.data_store_path(project, location, data_store) + assert expected == actual + + +def test_parse_data_store_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "data_store": "abalone", + } + path = DataStoreServiceClient.data_store_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataStoreServiceClient.parse_data_store_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = DataStoreServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = DataStoreServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = DataStoreServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = DataStoreServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = DataStoreServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = DataStoreServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = DataStoreServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = DataStoreServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataStoreServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = DataStoreServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = DataStoreServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = DataStoreServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = DataStoreServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = DataStoreServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataStoreServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.DataStoreServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.DataStoreServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = DataStoreServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_get_operation(transport: str = "grpc"): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (DataStoreServiceClient, transports.DataStoreServiceGrpcTransport), + (DataStoreServiceAsyncClient, transports.DataStoreServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_engine_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_engine_service.py new file mode 100644 index 000000000000..cf56185836a9 --- /dev/null +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_engine_service.py @@ -0,0 +1,5545 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.discoveryengine_v1.services.engine_service import ( + EngineServiceAsyncClient, + EngineServiceClient, + pagers, + transports, +) +from google.cloud.discoveryengine_v1.types import common 
+from google.cloud.discoveryengine_v1.types import engine +from google.cloud.discoveryengine_v1.types import engine as gcd_engine +from google.cloud.discoveryengine_v1.types import engine_service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert EngineServiceClient._get_default_mtls_endpoint(None) is None + assert ( + EngineServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + EngineServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + EngineServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + EngineServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + EngineServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +def test__read_environment_variables(): 
+ assert EngineServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert EngineServiceClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert EngineServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + EngineServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert EngineServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert EngineServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert EngineServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + EngineServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert EngineServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert EngineServiceClient._get_client_cert_source(None, False) is 
None + assert ( + EngineServiceClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + EngineServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + EngineServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + EngineServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + EngineServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EngineServiceClient), +) +@mock.patch.object( + EngineServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EngineServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = EngineServiceClient._DEFAULT_UNIVERSE + default_endpoint = EngineServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = EngineServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + EngineServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + EngineServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == EngineServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + EngineServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + EngineServiceClient._get_api_endpoint(None, None, default_universe, "always") + == EngineServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + 
EngineServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == EngineServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + EngineServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + EngineServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + EngineServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + EngineServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + EngineServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + EngineServiceClient._get_universe_domain(None, None) + == EngineServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + EngineServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (EngineServiceClient, transports.EngineServiceGrpcTransport, "grpc"), + (EngineServiceClient, transports.EngineServiceRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. 
+ assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. 
+ # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (EngineServiceClient, "grpc"), + (EngineServiceAsyncClient, "grpc_asyncio"), + (EngineServiceClient, "rest"), + ], +) +def test_engine_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.EngineServiceGrpcTransport, "grpc"), + 
(transports.EngineServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.EngineServiceRestTransport, "rest"), + ], +) +def test_engine_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (EngineServiceClient, "grpc"), + (EngineServiceAsyncClient, "grpc_asyncio"), + (EngineServiceClient, "rest"), + ], +) +def test_engine_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +def test_engine_service_client_get_transport_class(): + transport = EngineServiceClient.get_transport_class() + available_transports = [ + 
transports.EngineServiceGrpcTransport, + transports.EngineServiceRestTransport, + ] + assert transport in available_transports + + transport = EngineServiceClient.get_transport_class("grpc") + assert transport == transports.EngineServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (EngineServiceClient, transports.EngineServiceGrpcTransport, "grpc"), + ( + EngineServiceAsyncClient, + transports.EngineServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (EngineServiceClient, transports.EngineServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + EngineServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EngineServiceClient), +) +@mock.patch.object( + EngineServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EngineServiceAsyncClient), +) +def test_engine_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(EngineServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(EngineServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + 
always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (EngineServiceClient, transports.EngineServiceGrpcTransport, "grpc", "true"), + ( + EngineServiceAsyncClient, + transports.EngineServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (EngineServiceClient, transports.EngineServiceGrpcTransport, "grpc", "false"), + ( + EngineServiceAsyncClient, + transports.EngineServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (EngineServiceClient, transports.EngineServiceRestTransport, "rest", "true"), + (EngineServiceClient, transports.EngineServiceRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + EngineServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EngineServiceClient), +) +@mock.patch.object( + EngineServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EngineServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_engine_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [EngineServiceClient, EngineServiceAsyncClient] +) +@mock.patch.object( + EngineServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(EngineServiceClient), +) +@mock.patch.object( + EngineServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(EngineServiceAsyncClient), +) +def test_engine_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [EngineServiceClient, EngineServiceAsyncClient] +) +@mock.patch.object( + EngineServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EngineServiceClient), +) +@mock.patch.object( + EngineServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EngineServiceAsyncClient), +) +def test_engine_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = EngineServiceClient._DEFAULT_UNIVERSE + default_endpoint = EngineServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = EngineServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (EngineServiceClient, transports.EngineServiceGrpcTransport, "grpc"), + ( + EngineServiceAsyncClient, + transports.EngineServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (EngineServiceClient, transports.EngineServiceRestTransport, "rest"), + ], +) +def test_engine_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + EngineServiceClient, + transports.EngineServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + EngineServiceAsyncClient, + transports.EngineServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (EngineServiceClient, transports.EngineServiceRestTransport, "rest", None), + ], +) +def test_engine_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the 
case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_engine_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.discoveryengine_v1.services.engine_service.transports.EngineServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = EngineServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + EngineServiceClient, + transports.EngineServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + EngineServiceAsyncClient, + transports.EngineServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_engine_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + engine_service.CreateEngineRequest, + dict, + ], +) +def test_create_engine(request_type, transport: str = "grpc"): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are 
mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_engine), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == engine_service.CreateEngineRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_engine_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_engine), "__call__") as call: + client.create_engine() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == engine_service.CreateEngineRequest() + + +@pytest.mark.asyncio +async def test_create_engine_async( + transport: str = "grpc_asyncio", request_type=engine_service.CreateEngineRequest +): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_engine), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == engine_service.CreateEngineRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_engine_async_from_dict(): + await test_create_engine_async(request_type=dict) + + +def test_create_engine_field_headers(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = engine_service.CreateEngineRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_engine), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_engine_field_headers_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = engine_service.CreateEngineRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_engine), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_engine_flattened(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_engine), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_engine( + parent="parent_value", + engine=gcd_engine.Engine( + chat_engine_config=gcd_engine.Engine.ChatEngineConfig( + agent_creation_config=gcd_engine.Engine.ChatEngineConfig.AgentCreationConfig( + business="business_value" + ) + ) + ), + engine_id="engine_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].engine + mock_val = gcd_engine.Engine( + chat_engine_config=gcd_engine.Engine.ChatEngineConfig( + agent_creation_config=gcd_engine.Engine.ChatEngineConfig.AgentCreationConfig( + business="business_value" + ) + ) + ) + assert arg == mock_val + arg = args[0].engine_id + mock_val = "engine_id_value" + assert arg == mock_val + + +def test_create_engine_flattened_error(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_engine( + engine_service.CreateEngineRequest(), + parent="parent_value", + engine=gcd_engine.Engine( + chat_engine_config=gcd_engine.Engine.ChatEngineConfig( + agent_creation_config=gcd_engine.Engine.ChatEngineConfig.AgentCreationConfig( + business="business_value" + ) + ) + ), + engine_id="engine_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_engine_flattened_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_engine), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_engine( + parent="parent_value", + engine=gcd_engine.Engine( + chat_engine_config=gcd_engine.Engine.ChatEngineConfig( + agent_creation_config=gcd_engine.Engine.ChatEngineConfig.AgentCreationConfig( + business="business_value" + ) + ) + ), + engine_id="engine_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].engine + mock_val = gcd_engine.Engine( + chat_engine_config=gcd_engine.Engine.ChatEngineConfig( + agent_creation_config=gcd_engine.Engine.ChatEngineConfig.AgentCreationConfig( + business="business_value" + ) + ) + ) + assert arg == mock_val + arg = args[0].engine_id + mock_val = "engine_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_engine_flattened_error_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_engine( + engine_service.CreateEngineRequest(), + parent="parent_value", + engine=gcd_engine.Engine( + chat_engine_config=gcd_engine.Engine.ChatEngineConfig( + agent_creation_config=gcd_engine.Engine.ChatEngineConfig.AgentCreationConfig( + business="business_value" + ) + ) + ), + engine_id="engine_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + engine_service.DeleteEngineRequest, + dict, + ], +) +def test_delete_engine(request_type, transport: str = "grpc"): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_engine), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == engine_service.DeleteEngineRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_engine_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_engine), "__call__") as call: + client.delete_engine() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == engine_service.DeleteEngineRequest() + + +@pytest.mark.asyncio +async def test_delete_engine_async( + transport: str = "grpc_asyncio", request_type=engine_service.DeleteEngineRequest +): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_engine), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == engine_service.DeleteEngineRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_engine_async_from_dict(): + await test_delete_engine_async(request_type=dict) + + +def test_delete_engine_field_headers(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = engine_service.DeleteEngineRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_engine), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_engine_field_headers_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = engine_service.DeleteEngineRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_engine), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_engine_flattened(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_engine), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_engine( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_engine_flattened_error(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_engine( + engine_service.DeleteEngineRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_engine_flattened_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_engine), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_engine( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_engine_flattened_error_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_engine( + engine_service.DeleteEngineRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + engine_service.UpdateEngineRequest, + dict, + ], +) +def test_update_engine(request_type, transport: str = "grpc"): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_engine), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = gcd_engine.Engine( + name="name_value", + display_name="display_name_value", + data_store_ids=["data_store_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + industry_vertical=common.IndustryVertical.GENERIC, + ) + response = client.update_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == engine_service.UpdateEngineRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gcd_engine.Engine) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.data_store_ids == ["data_store_ids_value"] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.industry_vertical == common.IndustryVertical.GENERIC + + +def test_update_engine_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_engine), "__call__") as call: + client.update_engine() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == engine_service.UpdateEngineRequest() + + +@pytest.mark.asyncio +async def test_update_engine_async( + transport: str = "grpc_asyncio", request_type=engine_service.UpdateEngineRequest +): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_engine), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_engine.Engine( + name="name_value", + display_name="display_name_value", + data_store_ids=["data_store_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + industry_vertical=common.IndustryVertical.GENERIC, + ) + ) + response = await client.update_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == engine_service.UpdateEngineRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gcd_engine.Engine) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.data_store_ids == ["data_store_ids_value"] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.industry_vertical == common.IndustryVertical.GENERIC + + +@pytest.mark.asyncio +async def test_update_engine_async_from_dict(): + await test_update_engine_async(request_type=dict) + + +def test_update_engine_field_headers(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = engine_service.UpdateEngineRequest() + + request.engine.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_engine), "__call__") as call: + call.return_value = gcd_engine.Engine() + client.update_engine(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "engine.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_engine_field_headers_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = engine_service.UpdateEngineRequest() + + request.engine.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_engine), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcd_engine.Engine()) + await client.update_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "engine.name=name_value", + ) in kw["metadata"] + + +def test_update_engine_flattened(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_engine), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_engine.Engine() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.update_engine( + engine=gcd_engine.Engine( + chat_engine_config=gcd_engine.Engine.ChatEngineConfig( + agent_creation_config=gcd_engine.Engine.ChatEngineConfig.AgentCreationConfig( + business="business_value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].engine + mock_val = gcd_engine.Engine( + chat_engine_config=gcd_engine.Engine.ChatEngineConfig( + agent_creation_config=gcd_engine.Engine.ChatEngineConfig.AgentCreationConfig( + business="business_value" + ) + ) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_engine_flattened_error(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_engine( + engine_service.UpdateEngineRequest(), + engine=gcd_engine.Engine( + chat_engine_config=gcd_engine.Engine.ChatEngineConfig( + agent_creation_config=gcd_engine.Engine.ChatEngineConfig.AgentCreationConfig( + business="business_value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_engine_flattened_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_engine), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = gcd_engine.Engine() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcd_engine.Engine()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_engine( + engine=gcd_engine.Engine( + chat_engine_config=gcd_engine.Engine.ChatEngineConfig( + agent_creation_config=gcd_engine.Engine.ChatEngineConfig.AgentCreationConfig( + business="business_value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].engine + mock_val = gcd_engine.Engine( + chat_engine_config=gcd_engine.Engine.ChatEngineConfig( + agent_creation_config=gcd_engine.Engine.ChatEngineConfig.AgentCreationConfig( + business="business_value" + ) + ) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_engine_flattened_error_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_engine( + engine_service.UpdateEngineRequest(), + engine=gcd_engine.Engine( + chat_engine_config=gcd_engine.Engine.ChatEngineConfig( + agent_creation_config=gcd_engine.Engine.ChatEngineConfig.AgentCreationConfig( + business="business_value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + engine_service.GetEngineRequest, + dict, + ], +) +def test_get_engine(request_type, transport: str = "grpc"): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_engine), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = engine.Engine( + name="name_value", + display_name="display_name_value", + data_store_ids=["data_store_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + industry_vertical=common.IndustryVertical.GENERIC, + ) + response = client.get_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == engine_service.GetEngineRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, engine.Engine) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.data_store_ids == ["data_store_ids_value"] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.industry_vertical == common.IndustryVertical.GENERIC + + +def test_get_engine_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_engine), "__call__") as call: + client.get_engine() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == engine_service.GetEngineRequest() + + +@pytest.mark.asyncio +async def test_get_engine_async( + transport: str = "grpc_asyncio", request_type=engine_service.GetEngineRequest +): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_engine), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + engine.Engine( + name="name_value", + display_name="display_name_value", + data_store_ids=["data_store_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + industry_vertical=common.IndustryVertical.GENERIC, + ) + ) + response = await client.get_engine(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == engine_service.GetEngineRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, engine.Engine) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.data_store_ids == ["data_store_ids_value"] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.industry_vertical == common.IndustryVertical.GENERIC + + +@pytest.mark.asyncio +async def test_get_engine_async_from_dict(): + await test_get_engine_async(request_type=dict) + + +def test_get_engine_field_headers(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = engine_service.GetEngineRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_engine), "__call__") as call: + call.return_value = engine.Engine() + client.get_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_engine_field_headers_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = engine_service.GetEngineRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_engine), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(engine.Engine()) + await client.get_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_engine_flattened(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_engine), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = engine.Engine() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_engine( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_engine_flattened_error(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_engine( + engine_service.GetEngineRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_engine_flattened_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_engine), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = engine.Engine() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(engine.Engine()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_engine( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_engine_flattened_error_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_engine( + engine_service.GetEngineRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + engine_service.ListEnginesRequest, + dict, + ], +) +def test_list_engines(request_type, transport: str = "grpc"): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_engines), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = engine_service.ListEnginesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_engines(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == engine_service.ListEnginesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListEnginesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_engines_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_engines), "__call__") as call: + client.list_engines() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == engine_service.ListEnginesRequest() + + +@pytest.mark.asyncio +async def test_list_engines_async( + transport: str = "grpc_asyncio", request_type=engine_service.ListEnginesRequest +): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_engines), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + engine_service.ListEnginesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_engines(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == engine_service.ListEnginesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListEnginesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_engines_async_from_dict(): + await test_list_engines_async(request_type=dict) + + +def test_list_engines_field_headers(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = engine_service.ListEnginesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_engines), "__call__") as call: + call.return_value = engine_service.ListEnginesResponse() + client.list_engines(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_engines_field_headers_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = engine_service.ListEnginesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_engines), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + engine_service.ListEnginesResponse() + ) + await client.list_engines(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_engines_flattened(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_engines), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = engine_service.ListEnginesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_engines( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_engines_flattened_error(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_engines( + engine_service.ListEnginesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_engines_flattened_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_engines), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = engine_service.ListEnginesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + engine_service.ListEnginesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_engines( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_engines_flattened_error_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_engines( + engine_service.ListEnginesRequest(), + parent="parent_value", + ) + + +def test_list_engines_pager(transport_name: str = "grpc"): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_engines), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + engine_service.ListEnginesResponse( + engines=[ + engine.Engine(), + engine.Engine(), + engine.Engine(), + ], + next_page_token="abc", + ), + engine_service.ListEnginesResponse( + engines=[], + next_page_token="def", + ), + engine_service.ListEnginesResponse( + engines=[ + engine.Engine(), + ], + next_page_token="ghi", + ), + engine_service.ListEnginesResponse( + engines=[ + engine.Engine(), + engine.Engine(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_engines(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, engine.Engine) for i in results) + + +def test_list_engines_pages(transport_name: str = "grpc"): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_engines), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + engine_service.ListEnginesResponse( + engines=[ + engine.Engine(), + engine.Engine(), + engine.Engine(), + ], + next_page_token="abc", + ), + engine_service.ListEnginesResponse( + engines=[], + next_page_token="def", + ), + engine_service.ListEnginesResponse( + engines=[ + engine.Engine(), + ], + next_page_token="ghi", + ), + engine_service.ListEnginesResponse( + engines=[ + engine.Engine(), + engine.Engine(), + ], + ), + RuntimeError, + ) + pages = list(client.list_engines(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_engines_async_pager(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_engines), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + engine_service.ListEnginesResponse( + engines=[ + engine.Engine(), + engine.Engine(), + engine.Engine(), + ], + next_page_token="abc", + ), + engine_service.ListEnginesResponse( + engines=[], + next_page_token="def", + ), + engine_service.ListEnginesResponse( + engines=[ + engine.Engine(), + ], + next_page_token="ghi", + ), + engine_service.ListEnginesResponse( + engines=[ + engine.Engine(), + engine.Engine(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_engines( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, engine.Engine) for i in responses) + + +@pytest.mark.asyncio +async def test_list_engines_async_pages(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_engines), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + engine_service.ListEnginesResponse( + engines=[ + engine.Engine(), + engine.Engine(), + engine.Engine(), + ], + next_page_token="abc", + ), + engine_service.ListEnginesResponse( + engines=[], + next_page_token="def", + ), + engine_service.ListEnginesResponse( + engines=[ + engine.Engine(), + ], + next_page_token="ghi", + ), + engine_service.ListEnginesResponse( + engines=[ + engine.Engine(), + engine.Engine(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_engines(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + engine_service.CreateEngineRequest, + dict, + ], +) +def test_create_engine_rest(request_type): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/collections/sample3"} + request_init["engine"] = { + "chat_engine_config": { + "agent_creation_config": { + "business": "business_value", + "default_language_code": "default_language_code_value", + "time_zone": "time_zone_value", + "location": "location_value", + }, + "dialogflow_agent_to_link": "dialogflow_agent_to_link_value", + }, + "search_engine_config": {"search_tier": 1, "search_add_ons": [1]}, + "chat_engine_metadata": {"dialogflow_agent": "dialogflow_agent_value"}, + "name": "name_value", + "display_name": "display_name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "data_store_ids": ["data_store_ids_value1", "data_store_ids_value2"], + "solution_type": 1, + 
"industry_vertical": 1, + "common_config": {"company_name": "company_name_value"}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = engine_service.CreateEngineRequest.meta.fields["engine"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["engine"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( 
+ { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["engine"][field])): + del request_init["engine"][field][i][subfield] + else: + del request_init["engine"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_engine(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_engine_rest_required_fields( + request_type=engine_service.CreateEngineRequest, +): + transport_class = transports.EngineServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["engine_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "engineId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_engine._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "engineId" in jsonified_request + assert jsonified_request["engineId"] == request_init["engine_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["engineId"] = "engine_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_engine._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("engine_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "engineId" in jsonified_request + assert jsonified_request["engineId"] == "engine_id_value" + + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_engine(request) + + expected_params = [ + ( + "engineId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_engine_rest_unset_required_fields(): + transport = transports.EngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_engine._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("engineId",)) + & set( + ( + "parent", + "engine", + "engineId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_engine_rest_interceptors(null_interceptor): + transport = transports.EngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.EngineServiceRestInterceptor(), + ) + client = EngineServiceClient(transport=transport) + with mock.patch.object( + 
type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.EngineServiceRestInterceptor, "post_create_engine" + ) as post, mock.patch.object( + transports.EngineServiceRestInterceptor, "pre_create_engine" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = engine_service.CreateEngineRequest.pb( + engine_service.CreateEngineRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = engine_service.CreateEngineRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_engine( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_engine_rest_bad_request( + transport: str = "rest", request_type=engine_service.CreateEngineRequest +): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/collections/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_engine(request) + + +def test_create_engine_rest_flattened(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/collections/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + engine=gcd_engine.Engine( + chat_engine_config=gcd_engine.Engine.ChatEngineConfig( + agent_creation_config=gcd_engine.Engine.ChatEngineConfig.AgentCreationConfig( + business="business_value" + ) + ) + ), + engine_id="engine_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_engine(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/collections/*}/engines" + % client.transport._host, + args[1], + ) + + +def test_create_engine_rest_flattened_error(transport: str = "rest"): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_engine( + engine_service.CreateEngineRequest(), + parent="parent_value", + engine=gcd_engine.Engine( + chat_engine_config=gcd_engine.Engine.ChatEngineConfig( + agent_creation_config=gcd_engine.Engine.ChatEngineConfig.AgentCreationConfig( + business="business_value" + ) + ) + ), + engine_id="engine_id_value", + ) + + +def test_create_engine_rest_error(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + engine_service.DeleteEngineRequest, + dict, + ], +) +def test_delete_engine_rest(request_type): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/engines/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_engine(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_engine_rest_required_fields( + request_type=engine_service.DeleteEngineRequest, +): + transport_class = transports.EngineServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_engine._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_engine._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_engine(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_engine_rest_unset_required_fields(): + transport = transports.EngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_engine._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_engine_rest_interceptors(null_interceptor): + transport = transports.EngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.EngineServiceRestInterceptor(), + ) + client = EngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, 
mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.EngineServiceRestInterceptor, "post_delete_engine" + ) as post, mock.patch.object( + transports.EngineServiceRestInterceptor, "pre_delete_engine" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = engine_service.DeleteEngineRequest.pb( + engine_service.DeleteEngineRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = engine_service.DeleteEngineRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_engine( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_engine_rest_bad_request( + transport: str = "rest", request_type=engine_service.DeleteEngineRequest +): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/engines/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_engine(request) + + +def test_delete_engine_rest_flattened(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/collections/sample3/engines/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_engine(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/collections/*/engines/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_engine_rest_flattened_error(transport: str = "rest"): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_engine( + engine_service.DeleteEngineRequest(), + name="name_value", + ) + + +def test_delete_engine_rest_error(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + engine_service.UpdateEngineRequest, + dict, + ], +) +def test_update_engine_rest(request_type): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "engine": { + "name": "projects/sample1/locations/sample2/collections/sample3/engines/sample4" + } + } + request_init["engine"] = { + "chat_engine_config": { + "agent_creation_config": { + "business": "business_value", + "default_language_code": "default_language_code_value", + "time_zone": "time_zone_value", + "location": "location_value", + }, + "dialogflow_agent_to_link": "dialogflow_agent_to_link_value", + }, + "search_engine_config": {"search_tier": 1, "search_add_ons": [1]}, + "chat_engine_metadata": {"dialogflow_agent": "dialogflow_agent_value"}, + "name": "projects/sample1/locations/sample2/collections/sample3/engines/sample4", + "display_name": "display_name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "data_store_ids": ["data_store_ids_value1", "data_store_ids_value2"], + "solution_type": 1, + "industry_vertical": 1, + "common_config": {"company_name": "company_name_value"}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = engine_service.UpdateEngineRequest.meta.fields["engine"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["engine"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # 
Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["engine"][field])): + del request_init["engine"][field][i][subfield] + else: + del request_init["engine"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcd_engine.Engine( + name="name_value", + display_name="display_name_value", + data_store_ids=["data_store_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + industry_vertical=common.IndustryVertical.GENERIC, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcd_engine.Engine.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_engine(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcd_engine.Engine) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.data_store_ids == ["data_store_ids_value"] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.industry_vertical == common.IndustryVertical.GENERIC + + +def test_update_engine_rest_required_fields( + request_type=engine_service.UpdateEngineRequest, +): + transport_class = transports.EngineServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_engine._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_engine._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcd_engine.Engine() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gcd_engine.Engine.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_engine(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_engine_rest_unset_required_fields(): + transport = transports.EngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_engine._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("engine",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_engine_rest_interceptors(null_interceptor): + transport = transports.EngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.EngineServiceRestInterceptor(), + ) + client = EngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as 
transcode, mock.patch.object( + transports.EngineServiceRestInterceptor, "post_update_engine" + ) as post, mock.patch.object( + transports.EngineServiceRestInterceptor, "pre_update_engine" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = engine_service.UpdateEngineRequest.pb( + engine_service.UpdateEngineRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gcd_engine.Engine.to_json(gcd_engine.Engine()) + + request = engine_service.UpdateEngineRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gcd_engine.Engine() + + client.update_engine( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_engine_rest_bad_request( + transport: str = "rest", request_type=engine_service.UpdateEngineRequest +): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "engine": { + "name": "projects/sample1/locations/sample2/collections/sample3/engines/sample4" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_engine(request) + + +def test_update_engine_rest_flattened(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcd_engine.Engine() + + # get arguments that satisfy an http rule for this method + sample_request = { + "engine": { + "name": "projects/sample1/locations/sample2/collections/sample3/engines/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + engine=gcd_engine.Engine( + chat_engine_config=gcd_engine.Engine.ChatEngineConfig( + agent_creation_config=gcd_engine.Engine.ChatEngineConfig.AgentCreationConfig( + business="business_value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcd_engine.Engine.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_engine(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{engine.name=projects/*/locations/*/collections/*/engines/*}" + % client.transport._host, + args[1], + ) + + +def test_update_engine_rest_flattened_error(transport: str = "rest"): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_engine( + engine_service.UpdateEngineRequest(), + engine=gcd_engine.Engine( + chat_engine_config=gcd_engine.Engine.ChatEngineConfig( + agent_creation_config=gcd_engine.Engine.ChatEngineConfig.AgentCreationConfig( + business="business_value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_engine_rest_error(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + engine_service.GetEngineRequest, + dict, + ], +) +def test_get_engine_rest(request_type): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/engines/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = engine.Engine( + name="name_value", + display_name="display_name_value", + data_store_ids=["data_store_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + industry_vertical=common.IndustryVertical.GENERIC, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = engine.Engine.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_engine(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, engine.Engine) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.data_store_ids == ["data_store_ids_value"] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.industry_vertical == common.IndustryVertical.GENERIC + + +def test_get_engine_rest_required_fields(request_type=engine_service.GetEngineRequest): + transport_class = transports.EngineServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_engine._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + 
).get_engine._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = engine.Engine() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = engine.Engine.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_engine(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_engine_rest_unset_required_fields(): + transport = transports.EngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_engine._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + 
+@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_engine_rest_interceptors(null_interceptor): + transport = transports.EngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.EngineServiceRestInterceptor(), + ) + client = EngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.EngineServiceRestInterceptor, "post_get_engine" + ) as post, mock.patch.object( + transports.EngineServiceRestInterceptor, "pre_get_engine" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = engine_service.GetEngineRequest.pb( + engine_service.GetEngineRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = engine.Engine.to_json(engine.Engine()) + + request = engine_service.GetEngineRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = engine.Engine() + + client.get_engine( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_engine_rest_bad_request( + transport: str = "rest", request_type=engine_service.GetEngineRequest +): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/engines/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a 
BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_engine(request) + + +def test_get_engine_rest_flattened(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = engine.Engine() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/collections/sample3/engines/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = engine.Engine.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_engine(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/collections/*/engines/*}" + % client.transport._host, + args[1], + ) + + +def test_get_engine_rest_flattened_error(transport: str = "rest"): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_engine( + engine_service.GetEngineRequest(), + name="name_value", + ) + + +def test_get_engine_rest_error(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + engine_service.ListEnginesRequest, + dict, + ], +) +def test_list_engines_rest(request_type): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/collections/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = engine_service.ListEnginesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = engine_service.ListEnginesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_engines(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListEnginesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_engines_rest_required_fields( + request_type=engine_service.ListEnginesRequest, +): + transport_class = transports.EngineServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_engines._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_engines._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = engine_service.ListEnginesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = engine_service.ListEnginesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_engines(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_engines_rest_unset_required_fields(): + transport = transports.EngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_engines._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_engines_rest_interceptors(null_interceptor): + transport = transports.EngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.EngineServiceRestInterceptor(), + ) + client = EngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" 
+ ) as transcode, mock.patch.object( + transports.EngineServiceRestInterceptor, "post_list_engines" + ) as post, mock.patch.object( + transports.EngineServiceRestInterceptor, "pre_list_engines" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = engine_service.ListEnginesRequest.pb( + engine_service.ListEnginesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = engine_service.ListEnginesResponse.to_json( + engine_service.ListEnginesResponse() + ) + + request = engine_service.ListEnginesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = engine_service.ListEnginesResponse() + + client.list_engines( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_engines_rest_bad_request( + transport: str = "rest", request_type=engine_service.ListEnginesRequest +): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/collections/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_engines(request) + + +def test_list_engines_rest_flattened(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = engine_service.ListEnginesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/collections/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = engine_service.ListEnginesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_engines(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/collections/*}/engines" + % client.transport._host, + args[1], + ) + + +def test_list_engines_rest_flattened_error(transport: str = "rest"): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_engines( + engine_service.ListEnginesRequest(), + parent="parent_value", + ) + + +def test_list_engines_rest_pager(transport: str = "rest"): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + engine_service.ListEnginesResponse( + engines=[ + engine.Engine(), + engine.Engine(), + engine.Engine(), + ], + next_page_token="abc", + ), + engine_service.ListEnginesResponse( + engines=[], + next_page_token="def", + ), + engine_service.ListEnginesResponse( + engines=[ + engine.Engine(), + ], + next_page_token="ghi", + ), + engine_service.ListEnginesResponse( + engines=[ + engine.Engine(), + engine.Engine(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + engine_service.ListEnginesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + 
sample_request = { + "parent": "projects/sample1/locations/sample2/collections/sample3" + } + + pager = client.list_engines(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, engine.Engine) for i in results) + + pages = list(client.list_engines(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.EngineServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.EngineServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = EngineServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.EngineServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = EngineServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = EngineServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.EngineServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = EngineServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.EngineServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = EngineServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.EngineServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.EngineServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.EngineServiceGrpcTransport, + transports.EngineServiceGrpcAsyncIOTransport, + transports.EngineServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = EngineServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
def test_engine_service_base_transport_error():
    """A credentials object and a credentials_file together are rejected."""
    # Passing both a credentials object and credentials_file should raise an error
    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
        transport = transports.EngineServiceTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            credentials_file="credentials.json",
        )


def test_engine_service_base_transport():
    """The abstract base transport raises NotImplementedError for every RPC."""
    # Instantiate the base transport.
    with mock.patch(
        "google.cloud.discoveryengine_v1.services.engine_service.transports.EngineServiceTransport.__init__"
    ) as Transport:
        Transport.return_value = None
        transport = transports.EngineServiceTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )

    # Every method on the transport should just blindly
    # raise NotImplementedError.
    methods = (
        "create_engine",
        "delete_engine",
        "update_engine",
        "get_engine",
        "list_engines",
        "get_operation",
        "list_operations",
    )
    for method in methods:
        with pytest.raises(NotImplementedError):
            getattr(transport, method)(request=object())

    with pytest.raises(NotImplementedError):
        transport.close()

    # Additionally, the LRO client (a property) should
    # also raise NotImplementedError
    with pytest.raises(NotImplementedError):
        transport.operations_client

    # Catch all for all remaining methods and properties
    remainder = [
        "kind",
    ]
    for r in remainder:
        with pytest.raises(NotImplementedError):
            getattr(transport, r)()


def test_engine_service_base_transport_with_credentials_file():
    """A credentials file is loaded with the service's default scopes."""
    # Instantiate the base transport with a credentials file
    with mock.patch.object(
        google.auth, "load_credentials_from_file", autospec=True
    ) as load_creds, mock.patch(
        "google.cloud.discoveryengine_v1.services.engine_service.transports.EngineServiceTransport._prep_wrapped_messages"
    ) as Transport:
        Transport.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.EngineServiceTransport(
            credentials_file="credentials.json",
            quota_project_id="octopus",
        )
        load_creds.assert_called_once_with(
            "credentials.json",
            scopes=None,
            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
            quota_project_id="octopus",
        )


def test_engine_service_base_transport_with_adc():
    # Test the default credentials are used if credentials and credentials_file are None.
    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
        "google.cloud.discoveryengine_v1.services.engine_service.transports.EngineServiceTransport._prep_wrapped_messages"
    ) as Transport:
        Transport.return_value = None
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.EngineServiceTransport()
        adc.assert_called_once()


def test_engine_service_auth_adc():
    # If no credentials are provided, we should use ADC credentials.
    with mock.patch.object(google.auth, "default", autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        EngineServiceClient()
        adc.assert_called_once_with(
            scopes=None,
            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
            quota_project_id=None,
        )


@pytest.mark.parametrize(
    "transport_class",
    [
        transports.EngineServiceGrpcTransport,
        transports.EngineServiceGrpcAsyncIOTransport,
    ],
)
def test_engine_service_transport_auth_adc(transport_class):
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(google.auth, "default", autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class(quota_project_id="octopus", scopes=["1", "2"])
        adc.assert_called_once_with(
            scopes=["1", "2"],
            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
            quota_project_id="octopus",
        )


@pytest.mark.parametrize(
    "transport_class",
    [
        transports.EngineServiceGrpcTransport,
        transports.EngineServiceGrpcAsyncIOTransport,
        transports.EngineServiceRestTransport,
    ],
)
def test_engine_service_transport_auth_gdch_credentials(transport_class):
    """GDC-H credentials receive the host (or explicit api_audience) as audience."""
    host = "https://language.com"
    api_audience_tests = [None, "https://language2.com"]
    api_audience_expect = [host, "https://language2.com"]
    for t, e in zip(api_audience_tests, api_audience_expect):
        with mock.patch.object(google.auth, "default", autospec=True) as adc:
            gdch_mock = mock.MagicMock()
            type(gdch_mock).with_gdch_audience = mock.PropertyMock(
                return_value=gdch_mock
            )
            adc.return_value = (gdch_mock, None)
            transport_class(host=host, api_audience=t)
            gdch_mock.with_gdch_audience.assert_called_once_with(e)


@pytest.mark.parametrize(
    "transport_class,grpc_helpers",
    [
        (transports.EngineServiceGrpcTransport, grpc_helpers),
        (transports.EngineServiceGrpcAsyncIOTransport, grpc_helpers_async),
    ],
)
def test_engine_service_transport_create_channel(transport_class, grpc_helpers):
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(
        google.auth, "default", autospec=True
    ) as adc, mock.patch.object(
        grpc_helpers, "create_channel", autospec=True
    ) as create_channel:
        creds = ga_credentials.AnonymousCredentials()
        adc.return_value = (creds, None)
        transport_class(quota_project_id="octopus", scopes=["1", "2"])

        create_channel.assert_called_with(
            "discoveryengine.googleapis.com:443",
            credentials=creds,
            credentials_file=None,
            quota_project_id="octopus",
            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
            scopes=["1", "2"],
            default_host="discoveryengine.googleapis.com",
            ssl_credentials=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.EngineServiceGrpcTransport,
        transports.EngineServiceGrpcAsyncIOTransport,
    ],
)
def test_engine_service_grpc_transport_client_cert_source_for_mtls(transport_class):
    """mTLS channel credentials are honored whether passed or derived."""
    cred = ga_credentials.AnonymousCredentials()

    # Check ssl_channel_credentials is used if provided.
    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
        mock_ssl_channel_creds = mock.Mock()
        transport_class(
            host="squid.clam.whelk",
            credentials=cred,
            ssl_channel_credentials=mock_ssl_channel_creds,
        )
        mock_create_channel.assert_called_once_with(
            "squid.clam.whelk:443",
            credentials=cred,
            credentials_file=None,
            scopes=None,
            ssl_credentials=mock_ssl_channel_creds,
            quota_project_id=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )

    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
    # is used.
    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
            transport_class(
                credentials=cred,
                client_cert_source_for_mtls=client_cert_source_callback,
            )
            expected_cert, expected_key = client_cert_source_callback()
            mock_ssl_cred.assert_called_once_with(
                certificate_chain=expected_cert, private_key=expected_key
            )


def test_engine_service_http_transport_client_cert_source_for_mtls():
    """The REST transport configures mTLS on its AuthorizedSession."""
    cred = ga_credentials.AnonymousCredentials()
    with mock.patch(
        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
    ) as mock_configure_mtls_channel:
        transports.EngineServiceRestTransport(
            credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
        )
        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)


def test_engine_service_rest_lro_client():
    client = EngineServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    transport = client.transport

    # Ensure that we have a api-core operations client.
    assert isinstance(
        transport.operations_client,
        operations_v1.AbstractOperationsClient,
    )

    # Ensure that subsequent calls to the property send the exact same object.
    assert transport.operations_client is transport.operations_client


@pytest.mark.parametrize(
    "transport_name",
    [
        "grpc",
        "grpc_asyncio",
        "rest",
    ],
)
def test_engine_service_host_no_port(transport_name):
    """An endpoint without a port resolves to :443 (gRPC) or https:// (REST)."""
    client = EngineServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(
            api_endpoint="discoveryengine.googleapis.com"
        ),
        transport=transport_name,
    )
    assert client.transport._host == (
        "discoveryengine.googleapis.com:443"
        if transport_name in ["grpc", "grpc_asyncio"]
        else "https://discoveryengine.googleapis.com"
    )


@pytest.mark.parametrize(
    "transport_name",
    [
        "grpc",
        "grpc_asyncio",
        "rest",
    ],
)
def test_engine_service_host_with_port(transport_name):
    """An explicit port in the endpoint is preserved by every transport."""
    client = EngineServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(
            api_endpoint="discoveryengine.googleapis.com:8000"
        ),
        transport=transport_name,
    )
    assert client.transport._host == (
        "discoveryengine.googleapis.com:8000"
        if transport_name in ["grpc", "grpc_asyncio"]
        else "https://discoveryengine.googleapis.com:8000"
    )


@pytest.mark.parametrize(
    "transport_name",
    [
        "rest",
    ],
)
def test_engine_service_client_transport_session_collision(transport_name):
    """Two REST clients must not share per-method HTTP sessions."""
    creds1 = ga_credentials.AnonymousCredentials()
    creds2 = ga_credentials.AnonymousCredentials()
    client1 = EngineServiceClient(
        credentials=creds1,
        transport=transport_name,
    )
    client2 = EngineServiceClient(
        credentials=creds2,
        transport=transport_name,
    )
    session1 = client1.transport.create_engine._session
    session2 = client2.transport.create_engine._session
    assert session1 != session2
    session1 = client1.transport.delete_engine._session
    session2 = client2.transport.delete_engine._session
    assert session1 != session2
    session1 = client1.transport.update_engine._session
    session2 = client2.transport.update_engine._session
    assert session1 != session2
    session1 = client1.transport.get_engine._session
    session2 = client2.transport.get_engine._session
    assert session1 != session2
    session1 = client1.transport.list_engines._session
    session2 = client2.transport.list_engines._session
    assert session1 != session2


def test_engine_service_grpc_transport_channel():
    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())

    # Check that channel is used if provided.
    transport = transports.EngineServiceGrpcTransport(
        host="squid.clam.whelk",
        channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    # Fixed: compare to None with `is`, not `==` (PEP 8 / flake8 E711).
    assert transport._ssl_channel_credentials is None


def test_engine_service_grpc_asyncio_transport_channel():
    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())

    # Check that channel is used if provided.
    transport = transports.EngineServiceGrpcAsyncIOTransport(
        host="squid.clam.whelk",
        channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    # Fixed: compare to None with `is`, not `==` (PEP 8 / flake8 E711).
    assert transport._ssl_channel_credentials is None


# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.EngineServiceGrpcTransport,
        transports.EngineServiceGrpcAsyncIOTransport,
    ],
)
def test_engine_service_transport_channel_mtls_with_client_cert_source(transport_class):
    """Deprecated api_mtls_endpoint/client_cert_source kwargs still build an mTLS channel."""
    with mock.patch(
        "grpc.ssl_channel_credentials", autospec=True
    ) as grpc_ssl_channel_cred:
        with mock.patch.object(
            transport_class, "create_channel"
        ) as grpc_create_channel:
            mock_ssl_cred = mock.Mock()
            grpc_ssl_channel_cred.return_value = mock_ssl_cred

            mock_grpc_channel = mock.Mock()
            grpc_create_channel.return_value = mock_grpc_channel

            cred = ga_credentials.AnonymousCredentials()
            # The deprecated kwargs must still work, but warn.
            with pytest.warns(DeprecationWarning):
                with mock.patch.object(google.auth, "default") as adc:
                    adc.return_value = (cred, None)
                    transport = transport_class(
                        host="squid.clam.whelk",
                        api_mtls_endpoint="mtls.squid.clam.whelk",
                        client_cert_source=client_cert_source_callback,
                    )
                    adc.assert_called_once()

            grpc_ssl_channel_cred.assert_called_once_with(
                certificate_chain=b"cert bytes", private_key=b"key bytes"
            )
            grpc_create_channel.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=cred,
                credentials_file=None,
                scopes=None,
                ssl_credentials=mock_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            assert transport.grpc_channel == mock_grpc_channel
            assert transport._ssl_channel_credentials == mock_ssl_cred


# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.EngineServiceGrpcTransport,
        transports.EngineServiceGrpcAsyncIOTransport,
    ],
)
def test_engine_service_transport_channel_mtls_with_adc(transport_class):
    """With no explicit cert source, ADC-derived SSL credentials are used."""
    mock_ssl_cred = mock.Mock()
    with mock.patch.multiple(
        "google.auth.transport.grpc.SslCredentials",
        __init__=mock.Mock(return_value=None),
        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
    ):
        with mock.patch.object(
            transport_class, "create_channel"
        ) as grpc_create_channel:
            mock_grpc_channel = mock.Mock()
            grpc_create_channel.return_value = mock_grpc_channel
            mock_cred = mock.Mock()

            with pytest.warns(DeprecationWarning):
                transport = transport_class(
                    host="squid.clam.whelk",
                    credentials=mock_cred,
                    api_mtls_endpoint="mtls.squid.clam.whelk",
                    client_cert_source=None,
                )

            grpc_create_channel.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=mock_cred,
                credentials_file=None,
                scopes=None,
                ssl_credentials=mock_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            assert transport.grpc_channel == mock_grpc_channel


def test_engine_service_grpc_lro_client():
    client = EngineServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )
    transport = client.transport

    # Ensure that we have a api-core operations client.
    assert isinstance(
        transport.operations_client,
        operations_v1.OperationsClient,
    )

    # Ensure that subsequent calls to the property send the exact same object.
    assert transport.operations_client is transport.operations_client


def test_engine_service_grpc_lro_async_client():
    client = EngineServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc_asyncio",
    )
    transport = client.transport

    # Ensure that we have a api-core operations client.
    assert isinstance(
        transport.operations_client,
        operations_v1.OperationsAsyncClient,
    )

    # Ensure that subsequent calls to the property send the exact same object.
    assert transport.operations_client is transport.operations_client


def test_collection_path():
    """collection_path formats the canonical collection resource name."""
    project = "squid"
    location = "clam"
    collection = "whelk"
    expected = (
        "projects/{project}/locations/{location}/collections/{collection}".format(
            project=project,
            location=location,
            collection=collection,
        )
    )
    actual = EngineServiceClient.collection_path(project, location, collection)
    assert expected == actual


def test_parse_collection_path():
    expected = {
        "project": "octopus",
        "location": "oyster",
        "collection": "nudibranch",
    }
    path = EngineServiceClient.collection_path(**expected)

    # Check that the path construction is reversible.
    actual = EngineServiceClient.parse_collection_path(path)
    assert expected == actual


def test_engine_path():
    """engine_path formats the canonical engine resource name."""
    project = "cuttlefish"
    location = "mussel"
    collection = "winkle"
    engine = "nautilus"
    expected = "projects/{project}/locations/{location}/collections/{collection}/engines/{engine}".format(
        project=project,
        location=location,
        collection=collection,
        engine=engine,
    )
    actual = EngineServiceClient.engine_path(project, location, collection, engine)
    assert expected == actual


def test_parse_engine_path():
    expected = {
        "project": "scallop",
        "location": "abalone",
        "collection": "squid",
        "engine": "clam",
    }
    path = EngineServiceClient.engine_path(**expected)

    # Check that the path construction is reversible.
    actual = EngineServiceClient.parse_engine_path(path)
    assert expected == actual
def test_common_billing_account_path():
    """Round-trippable helpers for the shared billing-account resource name."""
    billing_account = "whelk"
    expected = "billingAccounts/{billing_account}".format(
        billing_account=billing_account,
    )
    actual = EngineServiceClient.common_billing_account_path(billing_account)
    assert expected == actual


def test_parse_common_billing_account_path():
    expected = {
        "billing_account": "octopus",
    }
    path = EngineServiceClient.common_billing_account_path(**expected)

    # Check that the path construction is reversible.
    actual = EngineServiceClient.parse_common_billing_account_path(path)
    assert expected == actual


def test_common_folder_path():
    folder = "oyster"
    expected = "folders/{folder}".format(
        folder=folder,
    )
    actual = EngineServiceClient.common_folder_path(folder)
    assert expected == actual


def test_parse_common_folder_path():
    expected = {
        "folder": "nudibranch",
    }
    path = EngineServiceClient.common_folder_path(**expected)

    # Check that the path construction is reversible.
    actual = EngineServiceClient.parse_common_folder_path(path)
    assert expected == actual


def test_common_organization_path():
    organization = "cuttlefish"
    expected = "organizations/{organization}".format(
        organization=organization,
    )
    actual = EngineServiceClient.common_organization_path(organization)
    assert expected == actual


def test_parse_common_organization_path():
    expected = {
        "organization": "mussel",
    }
    path = EngineServiceClient.common_organization_path(**expected)

    # Check that the path construction is reversible.
    actual = EngineServiceClient.parse_common_organization_path(path)
    assert expected == actual


def test_common_project_path():
    project = "winkle"
    expected = "projects/{project}".format(
        project=project,
    )
    actual = EngineServiceClient.common_project_path(project)
    assert expected == actual


def test_parse_common_project_path():
    expected = {
        "project": "nautilus",
    }
    path = EngineServiceClient.common_project_path(**expected)

    # Check that the path construction is reversible.
    actual = EngineServiceClient.parse_common_project_path(path)
    assert expected == actual


def test_common_location_path():
    project = "scallop"
    location = "abalone"
    expected = "projects/{project}/locations/{location}".format(
        project=project,
        location=location,
    )
    actual = EngineServiceClient.common_location_path(project, location)
    assert expected == actual


def test_parse_common_location_path():
    expected = {
        "project": "squid",
        "location": "clam",
    }
    path = EngineServiceClient.common_location_path(**expected)

    # Check that the path construction is reversible.
    actual = EngineServiceClient.parse_common_location_path(path)
    assert expected == actual


def test_client_with_default_client_info():
    """client_info is forwarded to the transport's _prep_wrapped_messages."""
    client_info = gapic_v1.client_info.ClientInfo()

    with mock.patch.object(
        transports.EngineServiceTransport, "_prep_wrapped_messages"
    ) as prep:
        client = EngineServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)

    with mock.patch.object(
        transports.EngineServiceTransport, "_prep_wrapped_messages"
    ) as prep:
        transport_class = EngineServiceClient.get_transport_class()
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)


@pytest.mark.asyncio
async def test_transport_close_async():
    """Leaving the async client context closes its gRPC channel exactly once."""
    client = EngineServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc_asyncio",
    )
    with mock.patch.object(
        type(getattr(client.transport, "grpc_channel")), "close"
    ) as close:
        async with client:
            close.assert_not_called()
        close.assert_called_once()


def test_get_operation_rest_bad_request(
    transport: str = "rest", request_type=operations_pb2.GetOperationRequest
):
    """A 400 HTTP response surfaces as core_exceptions.BadRequest."""
    client = EngineServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    request = request_type()
    request = json_format.ParseDict(
        {"name": "projects/sample1/operations/sample2"}, request
    )

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.get_operation(request)
@pytest.mark.parametrize(
    "request_type",
    [
        operations_pb2.GetOperationRequest,
        dict,
    ],
)
def test_get_operation_rest(request_type):
    """GetOperation over REST decodes a 200 response into an Operation."""
    client = EngineServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request_init = {"name": "projects/sample1/operations/sample2"}
    request = request_type(**request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = operations_pb2.Operation()

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = json_format.MessageToJson(return_value)

        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value

        response = client.get_operation(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, operations_pb2.Operation)


def test_list_operations_rest_bad_request(
    transport: str = "rest", request_type=operations_pb2.ListOperationsRequest
):
    """A 400 HTTP response surfaces as core_exceptions.BadRequest."""
    client = EngineServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    request = request_type()
    request = json_format.ParseDict({"name": "projects/sample1"}, request)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.list_operations(request)


@pytest.mark.parametrize(
    "request_type",
    [
        operations_pb2.ListOperationsRequest,
        dict,
    ],
)
def test_list_operations_rest(request_type):
    """ListOperations over REST decodes a 200 response into a ListOperationsResponse."""
    client = EngineServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request_init = {"name": "projects/sample1"}
    request = request_type(**request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = operations_pb2.ListOperationsResponse()

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = json_format.MessageToJson(return_value)

        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value

        response = client.list_operations(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, operations_pb2.ListOperationsResponse)


def test_get_operation(transport: str = "grpc"):
    client = EngineServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = operations_pb2.GetOperationRequest()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation()
        response = client.get_operation(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, operations_pb2.Operation)
@pytest.mark.asyncio
async def test_get_operation_async(transport: str = "grpc_asyncio"):
    client = EngineServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = operations_pb2.GetOperationRequest()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation()
        )
        response = await client.get_operation(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, operations_pb2.Operation)


def test_get_operation_field_headers():
    """request.name is propagated as the x-goog-request-params routing header."""
    client = EngineServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = operations_pb2.GetOperationRequest()
    request.name = "locations"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
        call.return_value = operations_pb2.Operation()

        client.get_operation(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        "x-goog-request-params",
        "name=locations",
    ) in kw["metadata"]


@pytest.mark.asyncio
async def test_get_operation_field_headers_async():
    client = EngineServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = operations_pb2.GetOperationRequest()
    request.name = "locations"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation()
        )
        await client.get_operation(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        "x-goog-request-params",
        "name=locations",
    ) in kw["metadata"]


def test_get_operation_from_dict():
    """get_operation also accepts a plain dict in place of a request proto."""
    client = EngineServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation()

        response = client.get_operation(
            request={
                "name": "locations",
            }
        )
        call.assert_called()


@pytest.mark.asyncio
async def test_get_operation_from_dict_async():
    client = EngineServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation()
        )
        response = await client.get_operation(
            request={
                "name": "locations",
            }
        )
        call.assert_called()


def test_list_operations(transport: str = "grpc"):
    client = EngineServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = operations_pb2.ListOperationsRequest()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.ListOperationsResponse()
        response = client.list_operations(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, operations_pb2.ListOperationsResponse)
@pytest.mark.asyncio
async def test_list_operations_async(transport: str = "grpc_asyncio"):
    client = EngineServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = operations_pb2.ListOperationsRequest()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.ListOperationsResponse()
        )
        response = await client.list_operations(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, operations_pb2.ListOperationsResponse)


def test_list_operations_field_headers():
    """request.name is propagated as the x-goog-request-params routing header."""
    client = EngineServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = operations_pb2.ListOperationsRequest()
    request.name = "locations"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
        call.return_value = operations_pb2.ListOperationsResponse()

        client.list_operations(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        "x-goog-request-params",
        "name=locations",
    ) in kw["metadata"]


@pytest.mark.asyncio
async def test_list_operations_field_headers_async():
    client = EngineServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = operations_pb2.ListOperationsRequest()
    request.name = "locations"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.ListOperationsResponse()
        )
        await client.list_operations(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        "x-goog-request-params",
        "name=locations",
    ) in kw["metadata"]


def test_list_operations_from_dict():
    """list_operations also accepts a plain dict in place of a request proto."""
    client = EngineServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.ListOperationsResponse()

        response = client.list_operations(
            request={
                "name": "locations",
            }
        )
        call.assert_called()


@pytest.mark.asyncio
async def test_list_operations_from_dict_async():
    client = EngineServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.ListOperationsResponse()
        )
        response = await client.list_operations(
            request={
                "name": "locations",
            }
        )
        call.assert_called()


def test_transport_close():
    """Leaving the client context closes the per-transport resource once."""
    # NOTE(review): this local dict shadows the module-level `transports`
    # import for the remainder of the function (generated code does this).
    transports = {
        "rest": "_session",
        "grpc": "_grpc_channel",
    }

    for transport, close_name in transports.items():
        client = EngineServiceClient(
            credentials=ga_credentials.AnonymousCredentials(), transport=transport
        )
        with mock.patch.object(
            type(getattr(client.transport, close_name)), "close"
        ) as close:
            with client:
                close.assert_not_called()
            close.assert_called_once()


def test_client_ctx():
    # NOTE(review): this function is truncated at the edge of the visible
    # chunk; only the portion shown in the diff is reproduced here.
    transports = [
        "rest",
        "grpc",
    ]
    for transport in transports:
        client = EngineServiceClient(
            credentials=ga_credentials.AnonymousCredentials(), transport=transport
        )
        # Test client calls underlying transport.
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (EngineServiceClient, transports.EngineServiceGrpcTransport), + (EngineServiceAsyncClient, transports.EngineServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_site_search_engine_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_site_search_engine_service.py new file mode 100644 index 000000000000..55a7096412c1 --- /dev/null +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_site_search_engine_service.py @@ -0,0 +1,8671 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.discoveryengine_v1.services.site_search_engine_service import ( + SiteSearchEngineServiceAsyncClient, + SiteSearchEngineServiceClient, + pagers, + transports, +) +from google.cloud.discoveryengine_v1.types import ( + site_search_engine, + 
site_search_engine_service, +) + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert SiteSearchEngineServiceClient._get_default_mtls_endpoint(None) is None + assert ( + SiteSearchEngineServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + SiteSearchEngineServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + SiteSearchEngineServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + SiteSearchEngineServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + SiteSearchEngineServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert SiteSearchEngineServiceClient._read_environment_variables() == ( + False, + "auto", + None, + 
) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert SiteSearchEngineServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert SiteSearchEngineServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + SiteSearchEngineServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert SiteSearchEngineServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert SiteSearchEngineServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert SiteSearchEngineServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + SiteSearchEngineServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert SiteSearchEngineServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert 
SiteSearchEngineServiceClient._get_client_cert_source(None, False) is None + assert ( + SiteSearchEngineServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + SiteSearchEngineServiceClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + SiteSearchEngineServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + SiteSearchEngineServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + SiteSearchEngineServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SiteSearchEngineServiceClient), +) +@mock.patch.object( + SiteSearchEngineServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SiteSearchEngineServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = SiteSearchEngineServiceClient._DEFAULT_UNIVERSE + default_endpoint = SiteSearchEngineServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = SiteSearchEngineServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + SiteSearchEngineServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + SiteSearchEngineServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == SiteSearchEngineServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + SiteSearchEngineServiceClient._get_api_endpoint( + 
None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + SiteSearchEngineServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == SiteSearchEngineServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + SiteSearchEngineServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == SiteSearchEngineServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + SiteSearchEngineServiceClient._get_api_endpoint( + None, None, mock_universe, "never" + ) + == mock_endpoint + ) + assert ( + SiteSearchEngineServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + SiteSearchEngineServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + SiteSearchEngineServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + SiteSearchEngineServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + SiteSearchEngineServiceClient._get_universe_domain(None, None) + == SiteSearchEngineServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + SiteSearchEngineServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + SiteSearchEngineServiceClient, + transports.SiteSearchEngineServiceGrpcTransport, + "grpc", + ), + ( + SiteSearchEngineServiceClient, + transports.SiteSearchEngineServiceRestTransport, + "rest", + ), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. 
+ google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (SiteSearchEngineServiceClient, "grpc"), + (SiteSearchEngineServiceAsyncClient, "grpc_asyncio"), + (SiteSearchEngineServiceClient, "rest"), + ], +) +def test_site_search_engine_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.SiteSearchEngineServiceGrpcTransport, "grpc"), + (transports.SiteSearchEngineServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.SiteSearchEngineServiceRestTransport, "rest"), + ], +) +def test_site_search_engine_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, 
always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (SiteSearchEngineServiceClient, "grpc"), + (SiteSearchEngineServiceAsyncClient, "grpc_asyncio"), + (SiteSearchEngineServiceClient, "rest"), + ], +) +def test_site_search_engine_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +def test_site_search_engine_service_client_get_transport_class(): + transport = SiteSearchEngineServiceClient.get_transport_class() + available_transports = [ + transports.SiteSearchEngineServiceGrpcTransport, + transports.SiteSearchEngineServiceRestTransport, + ] + assert transport in available_transports + + transport = SiteSearchEngineServiceClient.get_transport_class("grpc") + assert transport == transports.SiteSearchEngineServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + SiteSearchEngineServiceClient, + transports.SiteSearchEngineServiceGrpcTransport, + "grpc", + ), + ( + SiteSearchEngineServiceAsyncClient, + transports.SiteSearchEngineServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + SiteSearchEngineServiceClient, + transports.SiteSearchEngineServiceRestTransport, + "rest", 
+ ), + ], +) +@mock.patch.object( + SiteSearchEngineServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SiteSearchEngineServiceClient), +) +@mock.patch.object( + SiteSearchEngineServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SiteSearchEngineServiceAsyncClient), +) +def test_site_search_engine_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(SiteSearchEngineServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(SiteSearchEngineServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + SiteSearchEngineServiceClient, + transports.SiteSearchEngineServiceGrpcTransport, + "grpc", + "true", + ), + ( + SiteSearchEngineServiceAsyncClient, + transports.SiteSearchEngineServiceGrpcAsyncIOTransport, + 
"grpc_asyncio", + "true", + ), + ( + SiteSearchEngineServiceClient, + transports.SiteSearchEngineServiceGrpcTransport, + "grpc", + "false", + ), + ( + SiteSearchEngineServiceAsyncClient, + transports.SiteSearchEngineServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + SiteSearchEngineServiceClient, + transports.SiteSearchEngineServiceRestTransport, + "rest", + "true", + ), + ( + SiteSearchEngineServiceClient, + transports.SiteSearchEngineServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + SiteSearchEngineServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SiteSearchEngineServiceClient), +) +@mock.patch.object( + SiteSearchEngineServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SiteSearchEngineServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_site_search_engine_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [SiteSearchEngineServiceClient, SiteSearchEngineServiceAsyncClient] +) +@mock.patch.object( + SiteSearchEngineServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SiteSearchEngineServiceClient), +) +@mock.patch.object( + SiteSearchEngineServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SiteSearchEngineServiceAsyncClient), +) +def test_site_search_engine_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [SiteSearchEngineServiceClient, SiteSearchEngineServiceAsyncClient] +) +@mock.patch.object( + SiteSearchEngineServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SiteSearchEngineServiceClient), +) +@mock.patch.object( + SiteSearchEngineServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SiteSearchEngineServiceAsyncClient), +) +def test_site_search_engine_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = SiteSearchEngineServiceClient._DEFAULT_UNIVERSE + default_endpoint = SiteSearchEngineServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = SiteSearchEngineServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + SiteSearchEngineServiceClient, + transports.SiteSearchEngineServiceGrpcTransport, + "grpc", + ), + ( + SiteSearchEngineServiceAsyncClient, + transports.SiteSearchEngineServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + SiteSearchEngineServiceClient, + transports.SiteSearchEngineServiceRestTransport, + "rest", + ), + ], +) +def test_site_search_engine_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + SiteSearchEngineServiceClient, + transports.SiteSearchEngineServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + SiteSearchEngineServiceAsyncClient, + transports.SiteSearchEngineServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + SiteSearchEngineServiceClient, + transports.SiteSearchEngineServiceRestTransport, + "rest", + None, + ), + ], +) +def test_site_search_engine_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_site_search_engine_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.discoveryengine_v1.services.site_search_engine_service.transports.SiteSearchEngineServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = SiteSearchEngineServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + SiteSearchEngineServiceClient, + transports.SiteSearchEngineServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + SiteSearchEngineServiceAsyncClient, + transports.SiteSearchEngineServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_site_search_engine_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.GetSiteSearchEngineRequest, + dict, + ], +) +def test_get_site_search_engine(request_type, transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the 
runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_site_search_engine), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = site_search_engine.SiteSearchEngine( + name="name_value", + ) + response = client.get_site_search_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.GetSiteSearchEngineRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, site_search_engine.SiteSearchEngine) + assert response.name == "name_value" + + +def test_get_site_search_engine_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_site_search_engine), "__call__" + ) as call: + client.get_site_search_engine() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.GetSiteSearchEngineRequest() + + +@pytest.mark.asyncio +async def test_get_site_search_engine_async( + transport: str = "grpc_asyncio", + request_type=site_search_engine_service.GetSiteSearchEngineRequest, +): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_site_search_engine), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + site_search_engine.SiteSearchEngine( + name="name_value", + ) + ) + response = await client.get_site_search_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.GetSiteSearchEngineRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, site_search_engine.SiteSearchEngine) + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_get_site_search_engine_async_from_dict(): + await test_get_site_search_engine_async(request_type=dict) + + +def test_get_site_search_engine_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.GetSiteSearchEngineRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_site_search_engine), "__call__" + ) as call: + call.return_value = site_search_engine.SiteSearchEngine() + client.get_site_search_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_site_search_engine_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.GetSiteSearchEngineRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_site_search_engine), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + site_search_engine.SiteSearchEngine() + ) + await client.get_site_search_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_site_search_engine_flattened(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_site_search_engine), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = site_search_engine.SiteSearchEngine() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_site_search_engine( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_site_search_engine_flattened_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_site_search_engine( + site_search_engine_service.GetSiteSearchEngineRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_site_search_engine_flattened_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_site_search_engine), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = site_search_engine.SiteSearchEngine() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + site_search_engine.SiteSearchEngine() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_site_search_engine( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_site_search_engine_flattened_error_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_site_search_engine( + site_search_engine_service.GetSiteSearchEngineRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.CreateTargetSiteRequest, + dict, + ], +) +def test_create_target_site(request_type, transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_target_site), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.CreateTargetSiteRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_target_site_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_target_site), "__call__" + ) as call: + client.create_target_site() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.CreateTargetSiteRequest() + + +@pytest.mark.asyncio +async def test_create_target_site_async( + transport: str = "grpc_asyncio", + request_type=site_search_engine_service.CreateTargetSiteRequest, +): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_target_site), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.CreateTargetSiteRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_target_site_async_from_dict(): + await test_create_target_site_async(request_type=dict) + + +def test_create_target_site_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = site_search_engine_service.CreateTargetSiteRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_target_site), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_target_site_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.CreateTargetSiteRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_target_site), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_target_site_flattened(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_target_site), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_target_site( + parent="parent_value", + target_site=site_search_engine.TargetSite(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].target_site + mock_val = site_search_engine.TargetSite(name="name_value") + assert arg == mock_val + + +def test_create_target_site_flattened_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_target_site( + site_search_engine_service.CreateTargetSiteRequest(), + parent="parent_value", + target_site=site_search_engine.TargetSite(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_target_site_flattened_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_target_site), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_target_site( + parent="parent_value", + target_site=site_search_engine.TargetSite(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].target_site + mock_val = site_search_engine.TargetSite(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_target_site_flattened_error_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_target_site( + site_search_engine_service.CreateTargetSiteRequest(), + parent="parent_value", + target_site=site_search_engine.TargetSite(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.BatchCreateTargetSitesRequest, + dict, + ], +) +def test_batch_create_target_sites(request_type, transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_target_sites), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.batch_create_target_sites(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.BatchCreateTargetSitesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_batch_create_target_sites_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_target_sites), "__call__" + ) as call: + client.batch_create_target_sites() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.BatchCreateTargetSitesRequest() + + +@pytest.mark.asyncio +async def test_batch_create_target_sites_async( + transport: str = "grpc_asyncio", + request_type=site_search_engine_service.BatchCreateTargetSitesRequest, +): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_target_sites), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.batch_create_target_sites(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.BatchCreateTargetSitesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_batch_create_target_sites_async_from_dict(): + await test_batch_create_target_sites_async(request_type=dict) + + +def test_batch_create_target_sites_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.BatchCreateTargetSitesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_target_sites), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.batch_create_target_sites(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_batch_create_target_sites_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.BatchCreateTargetSitesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.batch_create_target_sites), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.batch_create_target_sites(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.GetTargetSiteRequest, + dict, + ], +) +def test_get_target_site(request_type, transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_target_site), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = site_search_engine.TargetSite( + name="name_value", + provided_uri_pattern="provided_uri_pattern_value", + type_=site_search_engine.TargetSite.Type.INCLUDE, + exact_match=True, + generated_uri_pattern="generated_uri_pattern_value", + indexing_status=site_search_engine.TargetSite.IndexingStatus.PENDING, + ) + response = client.get_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.GetTargetSiteRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, site_search_engine.TargetSite) + assert response.name == "name_value" + assert response.provided_uri_pattern == "provided_uri_pattern_value" + assert response.type_ == site_search_engine.TargetSite.Type.INCLUDE + assert response.exact_match is True + assert response.generated_uri_pattern == "generated_uri_pattern_value" + assert ( + response.indexing_status == site_search_engine.TargetSite.IndexingStatus.PENDING + ) + + +def test_get_target_site_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_target_site), "__call__") as call: + client.get_target_site() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.GetTargetSiteRequest() + + +@pytest.mark.asyncio +async def test_get_target_site_async( + transport: str = "grpc_asyncio", + request_type=site_search_engine_service.GetTargetSiteRequest, +): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_target_site), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + site_search_engine.TargetSite( + name="name_value", + provided_uri_pattern="provided_uri_pattern_value", + type_=site_search_engine.TargetSite.Type.INCLUDE, + exact_match=True, + generated_uri_pattern="generated_uri_pattern_value", + indexing_status=site_search_engine.TargetSite.IndexingStatus.PENDING, + ) + ) + response = await client.get_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.GetTargetSiteRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, site_search_engine.TargetSite) + assert response.name == "name_value" + assert response.provided_uri_pattern == "provided_uri_pattern_value" + assert response.type_ == site_search_engine.TargetSite.Type.INCLUDE + assert response.exact_match is True + assert response.generated_uri_pattern == "generated_uri_pattern_value" + assert ( + response.indexing_status == site_search_engine.TargetSite.IndexingStatus.PENDING + ) + + +@pytest.mark.asyncio +async def test_get_target_site_async_from_dict(): + await test_get_target_site_async(request_type=dict) + + +def test_get_target_site_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.GetTargetSiteRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_target_site), "__call__") as call: + call.return_value = site_search_engine.TargetSite() + client.get_target_site(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_target_site_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.GetTargetSiteRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_target_site), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + site_search_engine.TargetSite() + ) + await client.get_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_target_site_flattened(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_target_site), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = site_search_engine.TargetSite() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_target_site( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_target_site_flattened_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_target_site( + site_search_engine_service.GetTargetSiteRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_target_site_flattened_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_target_site), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = site_search_engine.TargetSite() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + site_search_engine.TargetSite() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_target_site( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_target_site_flattened_error_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_target_site( + site_search_engine_service.GetTargetSiteRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.UpdateTargetSiteRequest, + dict, + ], +) +def test_update_target_site(request_type, transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_target_site), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.UpdateTargetSiteRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_target_site_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_target_site), "__call__" + ) as call: + client.update_target_site() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.UpdateTargetSiteRequest() + + +@pytest.mark.asyncio +async def test_update_target_site_async( + transport: str = "grpc_asyncio", + request_type=site_search_engine_service.UpdateTargetSiteRequest, +): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_target_site), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.UpdateTargetSiteRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_target_site_async_from_dict(): + await test_update_target_site_async(request_type=dict) + + +def test_update_target_site_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = site_search_engine_service.UpdateTargetSiteRequest() + + request.target_site.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_target_site), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "target_site.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_target_site_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.UpdateTargetSiteRequest() + + request.target_site.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_target_site), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "target_site.name=name_value", + ) in kw["metadata"] + + +def test_update_target_site_flattened(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_target_site), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_target_site( + target_site=site_search_engine.TargetSite(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].target_site + mock_val = site_search_engine.TargetSite(name="name_value") + assert arg == mock_val + + +def test_update_target_site_flattened_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_target_site( + site_search_engine_service.UpdateTargetSiteRequest(), + target_site=site_search_engine.TargetSite(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_update_target_site_flattened_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_target_site), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_target_site( + target_site=site_search_engine.TargetSite(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].target_site + mock_val = site_search_engine.TargetSite(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_target_site_flattened_error_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_target_site( + site_search_engine_service.UpdateTargetSiteRequest(), + target_site=site_search_engine.TargetSite(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.DeleteTargetSiteRequest, + dict, + ], +) +def test_delete_target_site(request_type, transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_target_site), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.DeleteTargetSiteRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_target_site_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_target_site), "__call__" + ) as call: + client.delete_target_site() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.DeleteTargetSiteRequest() + + +@pytest.mark.asyncio +async def test_delete_target_site_async( + transport: str = "grpc_asyncio", + request_type=site_search_engine_service.DeleteTargetSiteRequest, +): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_target_site), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_target_site(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.DeleteTargetSiteRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_target_site_async_from_dict(): + await test_delete_target_site_async(request_type=dict) + + +def test_delete_target_site_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.DeleteTargetSiteRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_target_site), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_target_site_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.DeleteTargetSiteRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_target_site), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_target_site_flattened(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_target_site), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_target_site( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_target_site_flattened_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_target_site( + site_search_engine_service.DeleteTargetSiteRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_target_site_flattened_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_target_site), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_target_site( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_target_site_flattened_error_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_target_site( + site_search_engine_service.DeleteTargetSiteRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.ListTargetSitesRequest, + dict, + ], +) +def test_list_target_sites(request_type, transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_target_sites), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = site_search_engine_service.ListTargetSitesResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + response = client.list_target_sites(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.ListTargetSitesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTargetSitesPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +def test_list_target_sites_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_target_sites), "__call__" + ) as call: + client.list_target_sites() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.ListTargetSitesRequest() + + +@pytest.mark.asyncio +async def test_list_target_sites_async( + transport: str = "grpc_asyncio", + request_type=site_search_engine_service.ListTargetSitesRequest, +): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_target_sites), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + site_search_engine_service.ListTargetSitesResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + ) + response = await client.list_target_sites(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.ListTargetSitesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTargetSitesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +@pytest.mark.asyncio +async def test_list_target_sites_async_from_dict(): + await test_list_target_sites_async(request_type=dict) + + +def test_list_target_sites_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = site_search_engine_service.ListTargetSitesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_target_sites), "__call__" + ) as call: + call.return_value = site_search_engine_service.ListTargetSitesResponse() + client.list_target_sites(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_target_sites_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.ListTargetSitesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_target_sites), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + site_search_engine_service.ListTargetSitesResponse() + ) + await client.list_target_sites(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_target_sites_flattened(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_target_sites), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = site_search_engine_service.ListTargetSitesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_target_sites( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_target_sites_flattened_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_target_sites( + site_search_engine_service.ListTargetSitesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_target_sites_flattened_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_target_sites), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = site_search_engine_service.ListTargetSitesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + site_search_engine_service.ListTargetSitesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_target_sites( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_target_sites_flattened_error_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_target_sites( + site_search_engine_service.ListTargetSitesRequest(), + parent="parent_value", + ) + + +def test_list_target_sites_pager(transport_name: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_target_sites), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + next_page_token="abc", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[], + next_page_token="def", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + ], + next_page_token="ghi", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_target_sites(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, site_search_engine.TargetSite) for i in results) + + +def test_list_target_sites_pages(transport_name: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_target_sites), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + next_page_token="abc", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[], + next_page_token="def", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + ], + next_page_token="ghi", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + ), + RuntimeError, + ) + pages = list(client.list_target_sites(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_target_sites_async_pager(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_target_sites), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + next_page_token="abc", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[], + next_page_token="def", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + ], + next_page_token="ghi", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_target_sites( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, site_search_engine.TargetSite) for i in responses) + + +@pytest.mark.asyncio +async def test_list_target_sites_async_pages(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_target_sites), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + next_page_token="abc", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[], + next_page_token="def", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + ], + next_page_token="ghi", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_target_sites(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.EnableAdvancedSiteSearchRequest, + dict, + ], +) +def test_enable_advanced_site_search(request_type, transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enable_advanced_site_search), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.enable_advanced_site_search(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.EnableAdvancedSiteSearchRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_enable_advanced_site_search_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enable_advanced_site_search), "__call__" + ) as call: + client.enable_advanced_site_search() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.EnableAdvancedSiteSearchRequest() + + +@pytest.mark.asyncio +async def test_enable_advanced_site_search_async( + transport: str = "grpc_asyncio", + request_type=site_search_engine_service.EnableAdvancedSiteSearchRequest, +): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enable_advanced_site_search), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.enable_advanced_site_search(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.EnableAdvancedSiteSearchRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_enable_advanced_site_search_async_from_dict(): + await test_enable_advanced_site_search_async(request_type=dict) + + +def test_enable_advanced_site_search_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.EnableAdvancedSiteSearchRequest() + + request.site_search_engine = "site_search_engine_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enable_advanced_site_search), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.enable_advanced_site_search(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "site_search_engine=site_search_engine_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_enable_advanced_site_search_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = site_search_engine_service.EnableAdvancedSiteSearchRequest() + + request.site_search_engine = "site_search_engine_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enable_advanced_site_search), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.enable_advanced_site_search(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "site_search_engine=site_search_engine_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.DisableAdvancedSiteSearchRequest, + dict, + ], +) +def test_disable_advanced_site_search(request_type, transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.disable_advanced_site_search), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.disable_advanced_site_search(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.DisableAdvancedSiteSearchRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_disable_advanced_site_search_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.disable_advanced_site_search), "__call__" + ) as call: + client.disable_advanced_site_search() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.DisableAdvancedSiteSearchRequest() + + +@pytest.mark.asyncio +async def test_disable_advanced_site_search_async( + transport: str = "grpc_asyncio", + request_type=site_search_engine_service.DisableAdvancedSiteSearchRequest, +): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.disable_advanced_site_search), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.disable_advanced_site_search(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.DisableAdvancedSiteSearchRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_disable_advanced_site_search_async_from_dict(): + await test_disable_advanced_site_search_async(request_type=dict) + + +def test_disable_advanced_site_search_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.DisableAdvancedSiteSearchRequest() + + request.site_search_engine = "site_search_engine_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.disable_advanced_site_search), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.disable_advanced_site_search(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "site_search_engine=site_search_engine_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_disable_advanced_site_search_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.DisableAdvancedSiteSearchRequest() + + request.site_search_engine = "site_search_engine_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.disable_advanced_site_search), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.disable_advanced_site_search(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "site_search_engine=site_search_engine_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.RecrawlUrisRequest, + dict, + ], +) +def test_recrawl_uris(request_type, transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.recrawl_uris), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.recrawl_uris(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.RecrawlUrisRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_recrawl_uris_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.recrawl_uris), "__call__") as call: + client.recrawl_uris() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.RecrawlUrisRequest() + + +@pytest.mark.asyncio +async def test_recrawl_uris_async( + transport: str = "grpc_asyncio", + request_type=site_search_engine_service.RecrawlUrisRequest, +): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.recrawl_uris), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.recrawl_uris(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.RecrawlUrisRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_recrawl_uris_async_from_dict(): + await test_recrawl_uris_async(request_type=dict) + + +def test_recrawl_uris_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = site_search_engine_service.RecrawlUrisRequest() + + request.site_search_engine = "site_search_engine_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.recrawl_uris), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.recrawl_uris(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "site_search_engine=site_search_engine_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_recrawl_uris_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.RecrawlUrisRequest() + + request.site_search_engine = "site_search_engine_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.recrawl_uris), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.recrawl_uris(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "site_search_engine=site_search_engine_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.BatchVerifyTargetSitesRequest, + dict, + ], +) +def test_batch_verify_target_sites(request_type, transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_verify_target_sites), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.batch_verify_target_sites(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.BatchVerifyTargetSitesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_batch_verify_target_sites_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.batch_verify_target_sites), "__call__" + ) as call: + client.batch_verify_target_sites() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.BatchVerifyTargetSitesRequest() + + +@pytest.mark.asyncio +async def test_batch_verify_target_sites_async( + transport: str = "grpc_asyncio", + request_type=site_search_engine_service.BatchVerifyTargetSitesRequest, +): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_verify_target_sites), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.batch_verify_target_sites(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.BatchVerifyTargetSitesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_batch_verify_target_sites_async_from_dict(): + await test_batch_verify_target_sites_async(request_type=dict) + + +def test_batch_verify_target_sites_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = site_search_engine_service.BatchVerifyTargetSitesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_verify_target_sites), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.batch_verify_target_sites(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_batch_verify_target_sites_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.BatchVerifyTargetSitesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_verify_target_sites), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.batch_verify_target_sites(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.FetchDomainVerificationStatusRequest, + dict, + ], +) +def test_fetch_domain_verification_status(request_type, transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_domain_verification_status), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + site_search_engine_service.FetchDomainVerificationStatusResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + ) + response = client.fetch_domain_verification_status(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert ( + args[0] == site_search_engine_service.FetchDomainVerificationStatusRequest() + ) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.FetchDomainVerificationStatusPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +def test_fetch_domain_verification_status_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.fetch_domain_verification_status), "__call__" + ) as call: + client.fetch_domain_verification_status() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert ( + args[0] == site_search_engine_service.FetchDomainVerificationStatusRequest() + ) + + +@pytest.mark.asyncio +async def test_fetch_domain_verification_status_async( + transport: str = "grpc_asyncio", + request_type=site_search_engine_service.FetchDomainVerificationStatusRequest, +): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_domain_verification_status), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + site_search_engine_service.FetchDomainVerificationStatusResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + ) + response = await client.fetch_domain_verification_status(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert ( + args[0] == site_search_engine_service.FetchDomainVerificationStatusRequest() + ) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.FetchDomainVerificationStatusAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +@pytest.mark.asyncio +async def test_fetch_domain_verification_status_async_from_dict(): + await test_fetch_domain_verification_status_async(request_type=dict) + + +def test_fetch_domain_verification_status_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.FetchDomainVerificationStatusRequest() + + request.site_search_engine = "site_search_engine_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_domain_verification_status), "__call__" + ) as call: + call.return_value = ( + site_search_engine_service.FetchDomainVerificationStatusResponse() + ) + client.fetch_domain_verification_status(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "site_search_engine=site_search_engine_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_fetch_domain_verification_status_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.FetchDomainVerificationStatusRequest() + + request.site_search_engine = "site_search_engine_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.fetch_domain_verification_status), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + site_search_engine_service.FetchDomainVerificationStatusResponse() + ) + await client.fetch_domain_verification_status(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "site_search_engine=site_search_engine_value", + ) in kw["metadata"] + + +def test_fetch_domain_verification_status_pager(transport_name: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_domain_verification_status), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + next_page_token="abc", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[], + next_page_token="def", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + ], + next_page_token="ghi", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("site_search_engine", ""),)), + ) + pager = client.fetch_domain_verification_status(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, site_search_engine.TargetSite) for i in results) + + +def test_fetch_domain_verification_status_pages(transport_name: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_domain_verification_status), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + next_page_token="abc", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[], + next_page_token="def", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + ], + next_page_token="ghi", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + ), + RuntimeError, + ) + pages = list(client.fetch_domain_verification_status(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_fetch_domain_verification_status_async_pager(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_domain_verification_status), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + next_page_token="abc", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[], + next_page_token="def", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + ], + next_page_token="ghi", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + ), + RuntimeError, + ) + async_pager = await client.fetch_domain_verification_status( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, site_search_engine.TargetSite) for i in responses) + + +@pytest.mark.asyncio +async def test_fetch_domain_verification_status_async_pages(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_domain_verification_status), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + next_page_token="abc", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[], + next_page_token="def", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + ], + next_page_token="ghi", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.fetch_domain_verification_status(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.GetSiteSearchEngineRequest, + dict, + ], +) +def test_get_site_search_engine_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = site_search_engine.SiteSearchEngine( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = site_search_engine.SiteSearchEngine.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_site_search_engine(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, site_search_engine.SiteSearchEngine) + assert response.name == "name_value" + + +def test_get_site_search_engine_rest_required_fields( + request_type=site_search_engine_service.GetSiteSearchEngineRequest, +): + transport_class = transports.SiteSearchEngineServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_site_search_engine._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_site_search_engine._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = site_search_engine.SiteSearchEngine() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = site_search_engine.SiteSearchEngine.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_site_search_engine(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_site_search_engine_rest_unset_required_fields(): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_site_search_engine._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_site_search_engine_rest_interceptors(null_interceptor): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if 
null_interceptor + else transports.SiteSearchEngineServiceRestInterceptor(), + ) + client = SiteSearchEngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "post_get_site_search_engine" + ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "pre_get_site_search_engine" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = site_search_engine_service.GetSiteSearchEngineRequest.pb( + site_search_engine_service.GetSiteSearchEngineRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = site_search_engine.SiteSearchEngine.to_json( + site_search_engine.SiteSearchEngine() + ) + + request = site_search_engine_service.GetSiteSearchEngineRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = site_search_engine.SiteSearchEngine() + + client.get_site_search_engine( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_site_search_engine_rest_bad_request( + transport: str = "rest", + request_type=site_search_engine_service.GetSiteSearchEngineRequest, +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method 
and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_site_search_engine(request) + + +def test_get_site_search_engine_rest_flattened(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = site_search_engine.SiteSearchEngine() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = site_search_engine.SiteSearchEngine.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_site_search_engine(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/dataStores/*/siteSearchEngine}" + % client.transport._host, + args[1], + ) + + +def test_get_site_search_engine_rest_flattened_error(transport: str = "rest"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_site_search_engine( + site_search_engine_service.GetSiteSearchEngineRequest(), + name="name_value", + ) + + +def test_get_site_search_engine_rest_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.CreateTargetSiteRequest, + dict, + ], +) +def test_create_target_site_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request_init["target_site"] = { + "name": "name_value", + "provided_uri_pattern": "provided_uri_pattern_value", + "type_": 1, + "exact_match": True, + "generated_uri_pattern": "generated_uri_pattern_value", + "site_verification_info": { + "site_verification_state": 1, + "verify_time": {"seconds": 751, "nanos": 543}, + }, + "indexing_status": 1, + "update_time": {}, + "failure_reason": {"quota_failure": {"total_required_quota": 2157}}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = site_search_engine_service.CreateTargetSiteRequest.meta.fields[ + "target_site" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["target_site"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime 
version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["target_site"][field])): + del request_init["target_site"][field][i][subfield] + else: + del request_init["target_site"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_target_site(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_target_site_rest_required_fields( + request_type=site_search_engine_service.CreateTargetSiteRequest, +): + transport_class = transports.SiteSearchEngineServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_target_site._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_target_site._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_target_site(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_target_site_rest_unset_required_fields(): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_target_site._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "targetSite", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_target_site_rest_interceptors(null_interceptor): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SiteSearchEngineServiceRestInterceptor(), + ) + client = SiteSearchEngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "post_create_target_site" + ) as post, mock.patch.object( + 
transports.SiteSearchEngineServiceRestInterceptor, "pre_create_target_site" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = site_search_engine_service.CreateTargetSiteRequest.pb( + site_search_engine_service.CreateTargetSiteRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = site_search_engine_service.CreateTargetSiteRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_target_site( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_target_site_rest_bad_request( + transport: str = "rest", + request_type=site_search_engine_service.CreateTargetSiteRequest, +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_target_site(request) + + +def test_create_target_site_rest_flattened(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + target_site=site_search_engine.TargetSite(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_target_site(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/dataStores/*/siteSearchEngine}/targetSites" + % client.transport._host, + args[1], + ) + + +def test_create_target_site_rest_flattened_error(transport: str = "rest"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_target_site( + site_search_engine_service.CreateTargetSiteRequest(), + parent="parent_value", + target_site=site_search_engine.TargetSite(name="name_value"), + ) + + +def test_create_target_site_rest_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.BatchCreateTargetSitesRequest, + dict, + ], +) +def test_batch_create_target_sites_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.batch_create_target_sites(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_batch_create_target_sites_rest_required_fields( + request_type=site_search_engine_service.BatchCreateTargetSitesRequest, +): + transport_class = transports.SiteSearchEngineServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_create_target_sites._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_create_target_sites._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.batch_create_target_sites(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_batch_create_target_sites_rest_unset_required_fields(): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.batch_create_target_sites._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "requests", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_create_target_sites_rest_interceptors(null_interceptor): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SiteSearchEngineServiceRestInterceptor(), + ) + client = 
SiteSearchEngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_batch_create_target_sites", + ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "pre_batch_create_target_sites", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = site_search_engine_service.BatchCreateTargetSitesRequest.pb( + site_search_engine_service.BatchCreateTargetSitesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = site_search_engine_service.BatchCreateTargetSitesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.batch_create_target_sites( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_create_target_sites_rest_bad_request( + transport: str = "rest", + request_type=site_search_engine_service.BatchCreateTargetSitesRequest, +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest 
error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_create_target_sites(request) + + +def test_batch_create_target_sites_rest_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.GetTargetSiteRequest, + dict, + ], +) +def test_get_target_site_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine/targetSites/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = site_search_engine.TargetSite( + name="name_value", + provided_uri_pattern="provided_uri_pattern_value", + type_=site_search_engine.TargetSite.Type.INCLUDE, + exact_match=True, + generated_uri_pattern="generated_uri_pattern_value", + indexing_status=site_search_engine.TargetSite.IndexingStatus.PENDING, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = site_search_engine.TargetSite.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_target_site(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, site_search_engine.TargetSite) + assert response.name == "name_value" + assert response.provided_uri_pattern == "provided_uri_pattern_value" + assert response.type_ == site_search_engine.TargetSite.Type.INCLUDE + assert response.exact_match is True + assert response.generated_uri_pattern == "generated_uri_pattern_value" + assert ( + response.indexing_status == site_search_engine.TargetSite.IndexingStatus.PENDING + ) + + +def test_get_target_site_rest_required_fields( + request_type=site_search_engine_service.GetTargetSiteRequest, +): + transport_class = transports.SiteSearchEngineServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_target_site._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify 
required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_target_site._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = site_search_engine.TargetSite() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = site_search_engine.TargetSite.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_target_site(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_target_site_rest_unset_required_fields(): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_target_site._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_target_site_rest_interceptors(null_interceptor): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SiteSearchEngineServiceRestInterceptor(), + ) + client = SiteSearchEngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "post_get_target_site" + ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "pre_get_target_site" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = site_search_engine_service.GetTargetSiteRequest.pb( + site_search_engine_service.GetTargetSiteRequest() + ) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = site_search_engine.TargetSite.to_json( + site_search_engine.TargetSite() + ) + + request = site_search_engine_service.GetTargetSiteRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = site_search_engine.TargetSite() + + client.get_target_site( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_target_site_rest_bad_request( + transport: str = "rest", + request_type=site_search_engine_service.GetTargetSiteRequest, +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine/targetSites/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_target_site(request) + + +def test_get_target_site_rest_flattened(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = site_search_engine.TargetSite() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine/targetSites/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = site_search_engine.TargetSite.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_target_site(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/dataStores/*/siteSearchEngine/targetSites/*}" + % client.transport._host, + args[1], + ) + + +def test_get_target_site_rest_flattened_error(transport: str = "rest"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_target_site( + site_search_engine_service.GetTargetSiteRequest(), + name="name_value", + ) + + +def test_get_target_site_rest_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.UpdateTargetSiteRequest, + dict, + ], +) +def test_update_target_site_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "target_site": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine/targetSites/sample4" + } + } + request_init["target_site"] = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine/targetSites/sample4", + "provided_uri_pattern": "provided_uri_pattern_value", + "type_": 1, + "exact_match": True, + "generated_uri_pattern": "generated_uri_pattern_value", + "site_verification_info": { + "site_verification_state": 1, + "verify_time": {"seconds": 751, "nanos": 543}, + }, + "indexing_status": 1, + "update_time": {}, + "failure_reason": {"quota_failure": {"total_required_quota": 2157}}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = site_search_engine_service.UpdateTargetSiteRequest.meta.fields[ + "target_site" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["target_site"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["target_site"][field])): + del request_init["target_site"][field][i][subfield] + else: + del 
request_init["target_site"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_target_site(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_target_site_rest_required_fields( + request_type=site_search_engine_service.UpdateTargetSiteRequest, +): + transport_class = transports.SiteSearchEngineServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_target_site._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_target_site._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) 
+ + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_target_site(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_target_site_rest_unset_required_fields(): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_target_site._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("targetSite",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_target_site_rest_interceptors(null_interceptor): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SiteSearchEngineServiceRestInterceptor(), + ) + client = 
SiteSearchEngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "post_update_target_site" + ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "pre_update_target_site" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = site_search_engine_service.UpdateTargetSiteRequest.pb( + site_search_engine_service.UpdateTargetSiteRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = site_search_engine_service.UpdateTargetSiteRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_target_site( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_target_site_rest_bad_request( + transport: str = "rest", + request_type=site_search_engine_service.UpdateTargetSiteRequest, +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "target_site": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine/targetSites/sample4" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_target_site(request) + + +def test_update_target_site_rest_flattened(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "target_site": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine/targetSites/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + target_site=site_search_engine.TargetSite(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_target_site(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{target_site.name=projects/*/locations/*/dataStores/*/siteSearchEngine/targetSites/*}" + % client.transport._host, + args[1], + ) + + +def test_update_target_site_rest_flattened_error(transport: str = "rest"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_target_site( + site_search_engine_service.UpdateTargetSiteRequest(), + target_site=site_search_engine.TargetSite(name="name_value"), + ) + + +def test_update_target_site_rest_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.DeleteTargetSiteRequest, + dict, + ], +) +def test_delete_target_site_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine/targetSites/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_target_site(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_target_site_rest_required_fields( + request_type=site_search_engine_service.DeleteTargetSiteRequest, +): + transport_class = transports.SiteSearchEngineServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_target_site._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_target_site._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_target_site(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_target_site_rest_unset_required_fields(): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_target_site._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_target_site_rest_interceptors(null_interceptor): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SiteSearchEngineServiceRestInterceptor(), + ) + client = SiteSearchEngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, 
mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "post_delete_target_site" + ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "pre_delete_target_site" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = site_search_engine_service.DeleteTargetSiteRequest.pb( + site_search_engine_service.DeleteTargetSiteRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = site_search_engine_service.DeleteTargetSiteRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_target_site( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_target_site_rest_bad_request( + transport: str = "rest", + request_type=site_search_engine_service.DeleteTargetSiteRequest, +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine/targetSites/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_target_site(request) + + +def test_delete_target_site_rest_flattened(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine/targetSites/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_target_site(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/dataStores/*/siteSearchEngine/targetSites/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_target_site_rest_flattened_error(transport: str = "rest"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_target_site( + site_search_engine_service.DeleteTargetSiteRequest(), + name="name_value", + ) + + +def test_delete_target_site_rest_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.ListTargetSitesRequest, + dict, + ], +) +def test_list_target_sites_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = site_search_engine_service.ListTargetSitesResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = site_search_engine_service.ListTargetSitesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_target_sites(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTargetSitesPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +def test_list_target_sites_rest_required_fields( + request_type=site_search_engine_service.ListTargetSitesRequest, +): + transport_class = transports.SiteSearchEngineServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_target_sites._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_target_sites._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = site_search_engine_service.ListTargetSitesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = site_search_engine_service.ListTargetSitesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_target_sites(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_target_sites_rest_unset_required_fields(): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_target_sites._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_target_sites_rest_interceptors(null_interceptor): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SiteSearchEngineServiceRestInterceptor(), + ) + client = SiteSearchEngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "post_list_target_sites" + ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "pre_list_target_sites" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
site_search_engine_service.ListTargetSitesRequest.pb( + site_search_engine_service.ListTargetSitesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + site_search_engine_service.ListTargetSitesResponse.to_json( + site_search_engine_service.ListTargetSitesResponse() + ) + ) + + request = site_search_engine_service.ListTargetSitesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = site_search_engine_service.ListTargetSitesResponse() + + client.list_target_sites( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_target_sites_rest_bad_request( + transport: str = "rest", + request_type=site_search_engine_service.ListTargetSitesRequest, +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_target_sites(request) + + +def test_list_target_sites_rest_flattened(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = site_search_engine_service.ListTargetSitesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = site_search_engine_service.ListTargetSitesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_target_sites(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/dataStores/*/siteSearchEngine}/targetSites" + % client.transport._host, + args[1], + ) + + +def test_list_target_sites_rest_flattened_error(transport: str = "rest"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_target_sites( + site_search_engine_service.ListTargetSitesRequest(), + parent="parent_value", + ) + + +def test_list_target_sites_rest_pager(transport: str = "rest"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + next_page_token="abc", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[], + next_page_token="def", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + ], + next_page_token="ghi", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + site_search_engine_service.ListTargetSitesResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + + pager = client.list_target_sites(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, site_search_engine.TargetSite) for i in results) + + pages = list(client.list_target_sites(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.EnableAdvancedSiteSearchRequest, + dict, + ], +) +def test_enable_advanced_site_search_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a 
request that will satisfy transcoding + request_init = { + "site_search_engine": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.enable_advanced_site_search(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_enable_advanced_site_search_rest_required_fields( + request_type=site_search_engine_service.EnableAdvancedSiteSearchRequest, +): + transport_class = transports.SiteSearchEngineServiceRestTransport + + request_init = {} + request_init["site_search_engine"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).enable_advanced_site_search._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["siteSearchEngine"] = "site_search_engine_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).enable_advanced_site_search._get_unset_required_fields(jsonified_request) + 
jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "siteSearchEngine" in jsonified_request + assert jsonified_request["siteSearchEngine"] == "site_search_engine_value" + + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.enable_advanced_site_search(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_enable_advanced_site_search_rest_unset_required_fields(): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.enable_advanced_site_search._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("siteSearchEngine",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_enable_advanced_site_search_rest_interceptors(null_interceptor): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SiteSearchEngineServiceRestInterceptor(), + ) + client = SiteSearchEngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_enable_advanced_site_search", + ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "pre_enable_advanced_site_search", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
site_search_engine_service.EnableAdvancedSiteSearchRequest.pb( + site_search_engine_service.EnableAdvancedSiteSearchRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = site_search_engine_service.EnableAdvancedSiteSearchRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.enable_advanced_site_search( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_enable_advanced_site_search_rest_bad_request( + transport: str = "rest", + request_type=site_search_engine_service.EnableAdvancedSiteSearchRequest, +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "site_search_engine": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.enable_advanced_site_search(request) + + +def test_enable_advanced_site_search_rest_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.DisableAdvancedSiteSearchRequest, + dict, + ], +) +def test_disable_advanced_site_search_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "site_search_engine": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.disable_advanced_site_search(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_disable_advanced_site_search_rest_required_fields( + request_type=site_search_engine_service.DisableAdvancedSiteSearchRequest, +): + transport_class = transports.SiteSearchEngineServiceRestTransport + + request_init = {} + request_init["site_search_engine"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).disable_advanced_site_search._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["siteSearchEngine"] = "site_search_engine_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).disable_advanced_site_search._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "siteSearchEngine" in jsonified_request + assert jsonified_request["siteSearchEngine"] == "site_search_engine_value" + + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.disable_advanced_site_search(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_disable_advanced_site_search_rest_unset_required_fields(): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.disable_advanced_site_search._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("siteSearchEngine",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_disable_advanced_site_search_rest_interceptors(null_interceptor): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SiteSearchEngineServiceRestInterceptor(), + ) + client = SiteSearchEngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_disable_advanced_site_search", + ) as post, mock.patch.object( + 
transports.SiteSearchEngineServiceRestInterceptor, + "pre_disable_advanced_site_search", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = site_search_engine_service.DisableAdvancedSiteSearchRequest.pb( + site_search_engine_service.DisableAdvancedSiteSearchRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = site_search_engine_service.DisableAdvancedSiteSearchRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.disable_advanced_site_search( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_disable_advanced_site_search_rest_bad_request( + transport: str = "rest", + request_type=site_search_engine_service.DisableAdvancedSiteSearchRequest, +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "site_search_engine": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.disable_advanced_site_search(request) + + +def test_disable_advanced_site_search_rest_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.RecrawlUrisRequest, + dict, + ], +) +def test_recrawl_uris_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "site_search_engine": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.recrawl_uris(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_recrawl_uris_rest_required_fields( + request_type=site_search_engine_service.RecrawlUrisRequest, +): + transport_class = transports.SiteSearchEngineServiceRestTransport + + request_init = {} + request_init["site_search_engine"] = "" + request_init["uris"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).recrawl_uris._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["siteSearchEngine"] = "site_search_engine_value" + jsonified_request["uris"] = "uris_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).recrawl_uris._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "siteSearchEngine" in jsonified_request + assert jsonified_request["siteSearchEngine"] == "site_search_engine_value" + assert "uris" in jsonified_request + assert jsonified_request["uris"] == "uris_value" + + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.recrawl_uris(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_recrawl_uris_rest_unset_required_fields(): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.recrawl_uris._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "siteSearchEngine", + "uris", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_recrawl_uris_rest_interceptors(null_interceptor): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SiteSearchEngineServiceRestInterceptor(), + ) + client = SiteSearchEngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + 
operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "post_recrawl_uris" + ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "pre_recrawl_uris" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = site_search_engine_service.RecrawlUrisRequest.pb( + site_search_engine_service.RecrawlUrisRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = site_search_engine_service.RecrawlUrisRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.recrawl_uris( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_recrawl_uris_rest_bad_request( + transport: str = "rest", request_type=site_search_engine_service.RecrawlUrisRequest +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "site_search_engine": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.recrawl_uris(request) + + +def test_recrawl_uris_rest_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.BatchVerifyTargetSitesRequest, + dict, + ], +) +def test_batch_verify_target_sites_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.batch_verify_target_sites(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_batch_verify_target_sites_rest_required_fields( + request_type=site_search_engine_service.BatchVerifyTargetSitesRequest, +): + transport_class = transports.SiteSearchEngineServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_verify_target_sites._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_verify_target_sites._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.batch_verify_target_sites(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_batch_verify_target_sites_rest_unset_required_fields(): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.batch_verify_target_sites._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_verify_target_sites_rest_interceptors(null_interceptor): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SiteSearchEngineServiceRestInterceptor(), + ) + client = SiteSearchEngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_batch_verify_target_sites", + ) as post, mock.patch.object( + 
transports.SiteSearchEngineServiceRestInterceptor, + "pre_batch_verify_target_sites", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = site_search_engine_service.BatchVerifyTargetSitesRequest.pb( + site_search_engine_service.BatchVerifyTargetSitesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = site_search_engine_service.BatchVerifyTargetSitesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.batch_verify_target_sites( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_verify_target_sites_rest_bad_request( + transport: str = "rest", + request_type=site_search_engine_service.BatchVerifyTargetSitesRequest, +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_verify_target_sites(request) + + +def test_batch_verify_target_sites_rest_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.FetchDomainVerificationStatusRequest, + dict, + ], +) +def test_fetch_domain_verification_status_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "site_search_engine": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = site_search_engine_service.FetchDomainVerificationStatusResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + site_search_engine_service.FetchDomainVerificationStatusResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.fetch_domain_verification_status(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.FetchDomainVerificationStatusPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +def test_fetch_domain_verification_status_rest_required_fields( + request_type=site_search_engine_service.FetchDomainVerificationStatusRequest, +): + transport_class = transports.SiteSearchEngineServiceRestTransport + + request_init = {} + request_init["site_search_engine"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_domain_verification_status._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["siteSearchEngine"] = "site_search_engine_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_domain_verification_status._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "siteSearchEngine" in jsonified_request + assert jsonified_request["siteSearchEngine"] == "site_search_engine_value" + + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = site_search_engine_service.FetchDomainVerificationStatusResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = ( + site_search_engine_service.FetchDomainVerificationStatusResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.fetch_domain_verification_status(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_fetch_domain_verification_status_rest_unset_required_fields(): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.fetch_domain_verification_status._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("siteSearchEngine",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_fetch_domain_verification_status_rest_interceptors(null_interceptor): + transport = transports.SiteSearchEngineServiceRestTransport( + 
credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SiteSearchEngineServiceRestInterceptor(), + ) + client = SiteSearchEngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_fetch_domain_verification_status", + ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "pre_fetch_domain_verification_status", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = site_search_engine_service.FetchDomainVerificationStatusRequest.pb( + site_search_engine_service.FetchDomainVerificationStatusRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + site_search_engine_service.FetchDomainVerificationStatusResponse.to_json( + site_search_engine_service.FetchDomainVerificationStatusResponse() + ) + ) + + request = site_search_engine_service.FetchDomainVerificationStatusRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( + site_search_engine_service.FetchDomainVerificationStatusResponse() + ) + + client.fetch_domain_verification_status( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_fetch_domain_verification_status_rest_bad_request( + transport: str = "rest", + request_type=site_search_engine_service.FetchDomainVerificationStatusRequest, +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "site_search_engine": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.fetch_domain_verification_status(request) + + +def test_fetch_domain_verification_status_rest_pager(transport: str = "rest"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + next_page_token="abc", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[], + next_page_token="def", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + ], + next_page_token="ghi", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + site_search_engine_service.FetchDomainVerificationStatusResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "site_search_engine": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/siteSearchEngine" + } + + pager = client.fetch_domain_verification_status(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, site_search_engine.TargetSite) for i in results) + + pages = list( + client.fetch_domain_verification_status(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.SiteSearchEngineServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.SiteSearchEngineServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SiteSearchEngineServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.SiteSearchEngineServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SiteSearchEngineServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SiteSearchEngineServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.SiteSearchEngineServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SiteSearchEngineServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.SiteSearchEngineServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = SiteSearchEngineServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.SiteSearchEngineServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.SiteSearchEngineServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SiteSearchEngineServiceGrpcTransport, + transports.SiteSearchEngineServiceGrpcAsyncIOTransport, + transports.SiteSearchEngineServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = SiteSearchEngineServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.SiteSearchEngineServiceGrpcTransport, + ) + + +def test_site_search_engine_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.SiteSearchEngineServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_site_search_engine_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.discoveryengine_v1.services.site_search_engine_service.transports.SiteSearchEngineServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.SiteSearchEngineServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "get_site_search_engine", + "create_target_site", + "batch_create_target_sites", + "get_target_site", + "update_target_site", + "delete_target_site", + "list_target_sites", + "enable_advanced_site_search", + "disable_advanced_site_search", + "recrawl_uris", + "batch_verify_target_sites", + "fetch_domain_verification_status", + "get_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_site_search_engine_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.discoveryengine_v1.services.site_search_engine_service.transports.SiteSearchEngineServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SiteSearchEngineServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_site_search_engine_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.discoveryengine_v1.services.site_search_engine_service.transports.SiteSearchEngineServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SiteSearchEngineServiceTransport() + adc.assert_called_once() + + +def test_site_search_engine_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + SiteSearchEngineServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SiteSearchEngineServiceGrpcTransport, + transports.SiteSearchEngineServiceGrpcAsyncIOTransport, + ], +) +def test_site_search_engine_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SiteSearchEngineServiceGrpcTransport, + transports.SiteSearchEngineServiceGrpcAsyncIOTransport, + transports.SiteSearchEngineServiceRestTransport, + ], +) +def test_site_search_engine_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.SiteSearchEngineServiceGrpcTransport, grpc_helpers), + (transports.SiteSearchEngineServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_site_search_engine_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SiteSearchEngineServiceGrpcTransport, + transports.SiteSearchEngineServiceGrpcAsyncIOTransport, + ], +) +def test_site_search_engine_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_site_search_engine_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.SiteSearchEngineServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_site_search_engine_service_rest_lro_client(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_site_search_engine_service_host_no_port(transport_name): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="discoveryengine.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_site_search_engine_service_host_with_port(transport_name): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="discoveryengine.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "discoveryengine.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_site_search_engine_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = SiteSearchEngineServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = SiteSearchEngineServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_site_search_engine._session + session2 = client2.transport.get_site_search_engine._session + assert session1 != session2 + session1 = client1.transport.create_target_site._session + session2 = client2.transport.create_target_site._session + assert session1 != session2 + session1 = 
client1.transport.batch_create_target_sites._session + session2 = client2.transport.batch_create_target_sites._session + assert session1 != session2 + session1 = client1.transport.get_target_site._session + session2 = client2.transport.get_target_site._session + assert session1 != session2 + session1 = client1.transport.update_target_site._session + session2 = client2.transport.update_target_site._session + assert session1 != session2 + session1 = client1.transport.delete_target_site._session + session2 = client2.transport.delete_target_site._session + assert session1 != session2 + session1 = client1.transport.list_target_sites._session + session2 = client2.transport.list_target_sites._session + assert session1 != session2 + session1 = client1.transport.enable_advanced_site_search._session + session2 = client2.transport.enable_advanced_site_search._session + assert session1 != session2 + session1 = client1.transport.disable_advanced_site_search._session + session2 = client2.transport.disable_advanced_site_search._session + assert session1 != session2 + session1 = client1.transport.recrawl_uris._session + session2 = client2.transport.recrawl_uris._session + assert session1 != session2 + session1 = client1.transport.batch_verify_target_sites._session + session2 = client2.transport.batch_verify_target_sites._session + assert session1 != session2 + session1 = client1.transport.fetch_domain_verification_status._session + session2 = client2.transport.fetch_domain_verification_status._session + assert session1 != session2 + + +def test_site_search_engine_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.SiteSearchEngineServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_site_search_engine_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.SiteSearchEngineServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.SiteSearchEngineServiceGrpcTransport, + transports.SiteSearchEngineServiceGrpcAsyncIOTransport, + ], +) +def test_site_search_engine_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + 
) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.SiteSearchEngineServiceGrpcTransport, + transports.SiteSearchEngineServiceGrpcAsyncIOTransport, + ], +) +def test_site_search_engine_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_site_search_engine_service_grpc_lro_client(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = 
client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_site_search_engine_service_grpc_lro_async_client(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_site_search_engine_path(): + project = "squid" + location = "clam" + data_store = "whelk" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}/siteSearchEngine".format( + project=project, + location=location, + data_store=data_store, + ) + actual = SiteSearchEngineServiceClient.site_search_engine_path( + project, location, data_store + ) + assert expected == actual + + +def test_parse_site_search_engine_path(): + expected = { + "project": "octopus", + "location": "oyster", + "data_store": "nudibranch", + } + path = SiteSearchEngineServiceClient.site_search_engine_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SiteSearchEngineServiceClient.parse_site_search_engine_path(path) + assert expected == actual + + +def test_target_site_path(): + project = "cuttlefish" + location = "mussel" + data_store = "winkle" + target_site = "nautilus" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}/siteSearchEngine/targetSites/{target_site}".format( + project=project, + location=location, + data_store=data_store, + target_site=target_site, + ) + actual = SiteSearchEngineServiceClient.target_site_path( + project, location, data_store, target_site + ) + assert expected == actual + + +def test_parse_target_site_path(): + expected = { + "project": "scallop", + "location": "abalone", + "data_store": "squid", + "target_site": "clam", + } + path = SiteSearchEngineServiceClient.target_site_path(**expected) + + # Check that the path construction is reversible. + actual = SiteSearchEngineServiceClient.parse_target_site_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = SiteSearchEngineServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = SiteSearchEngineServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SiteSearchEngineServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = SiteSearchEngineServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = SiteSearchEngineServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = SiteSearchEngineServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = SiteSearchEngineServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = SiteSearchEngineServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = SiteSearchEngineServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = SiteSearchEngineServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = SiteSearchEngineServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SiteSearchEngineServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = SiteSearchEngineServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = SiteSearchEngineServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = SiteSearchEngineServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.SiteSearchEngineServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.SiteSearchEngineServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = SiteSearchEngineServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = 
SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_get_operation(transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + SiteSearchEngineServiceClient, + transports.SiteSearchEngineServiceGrpcTransport, + ), + ( + SiteSearchEngineServiceAsyncClient, + transports.SiteSearchEngineServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_completion_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_completion_service.py index db91d571fdcf..5a55c15911fe 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_completion_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_completion_service.py @@ -26,9 +26,18 @@ import json import math -from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + 
path_template, +) from google.api_core import api_core_version, client_options from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError @@ -49,7 +58,12 @@ CompletionServiceClient, transports, ) -from google.cloud.discoveryengine_v1beta.types import completion_service +from google.cloud.discoveryengine_v1beta.types import ( + completion, + completion_service, + import_config, + purge_config, +) def client_cert_source_callback(): @@ -1306,6 +1320,316 @@ async def test_complete_query_field_headers_async(): ) in kw["metadata"] +@pytest.mark.parametrize( + "request_type", + [ + import_config.ImportSuggestionDenyListEntriesRequest, + dict, + ], +) +def test_import_suggestion_deny_list_entries(request_type, transport: str = "grpc"): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_suggestion_deny_list_entries), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.import_suggestion_deny_list_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == import_config.ImportSuggestionDenyListEntriesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_import_suggestion_deny_list_entries_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_suggestion_deny_list_entries), "__call__" + ) as call: + client.import_suggestion_deny_list_entries() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == import_config.ImportSuggestionDenyListEntriesRequest() + + +@pytest.mark.asyncio +async def test_import_suggestion_deny_list_entries_async( + transport: str = "grpc_asyncio", + request_type=import_config.ImportSuggestionDenyListEntriesRequest, +): + client = CompletionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_suggestion_deny_list_entries), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.import_suggestion_deny_list_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == import_config.ImportSuggestionDenyListEntriesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_import_suggestion_deny_list_entries_async_from_dict(): + await test_import_suggestion_deny_list_entries_async(request_type=dict) + + +def test_import_suggestion_deny_list_entries_field_headers(): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = import_config.ImportSuggestionDenyListEntriesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_suggestion_deny_list_entries), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.import_suggestion_deny_list_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_import_suggestion_deny_list_entries_field_headers_async(): + client = CompletionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = import_config.ImportSuggestionDenyListEntriesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.import_suggestion_deny_list_entries), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.import_suggestion_deny_list_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + purge_config.PurgeSuggestionDenyListEntriesRequest, + dict, + ], +) +def test_purge_suggestion_deny_list_entries(request_type, transport: str = "grpc"): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_suggestion_deny_list_entries), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.purge_suggestion_deny_list_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == purge_config.PurgeSuggestionDenyListEntriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_purge_suggestion_deny_list_entries_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_suggestion_deny_list_entries), "__call__" + ) as call: + client.purge_suggestion_deny_list_entries() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == purge_config.PurgeSuggestionDenyListEntriesRequest() + + +@pytest.mark.asyncio +async def test_purge_suggestion_deny_list_entries_async( + transport: str = "grpc_asyncio", + request_type=purge_config.PurgeSuggestionDenyListEntriesRequest, +): + client = CompletionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_suggestion_deny_list_entries), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.purge_suggestion_deny_list_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == purge_config.PurgeSuggestionDenyListEntriesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_purge_suggestion_deny_list_entries_async_from_dict(): + await test_purge_suggestion_deny_list_entries_async(request_type=dict) + + +def test_purge_suggestion_deny_list_entries_field_headers(): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = purge_config.PurgeSuggestionDenyListEntriesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_suggestion_deny_list_entries), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.purge_suggestion_deny_list_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_purge_suggestion_deny_list_entries_field_headers_async(): + client = CompletionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = purge_config.PurgeSuggestionDenyListEntriesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.purge_suggestion_deny_list_entries), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.purge_suggestion_deny_list_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + @pytest.mark.parametrize( "request_type", [ @@ -1561,23 +1885,459 @@ def test_complete_query_rest_error(): ) -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.CompletionServiceGrpcTransport( +@pytest.mark.parametrize( + "request_type", + [ + import_config.ImportSuggestionDenyListEntriesRequest, + dict, + ], +) +def test_import_suggestion_deny_list_entries_rest(request_type): + client = CompletionServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = CompletionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - # It is an error to provide a credentials file and a transport instance. - transport = transports.CompletionServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CompletionServiceClient( + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.import_suggestion_deny_list_entries(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_import_suggestion_deny_list_entries_rest_required_fields( + request_type=import_config.ImportSuggestionDenyListEntriesRequest, +): + transport_class = transports.CompletionServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).import_suggestion_deny_list_entries._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).import_suggestion_deny_list_entries._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CompletionServiceClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.import_suggestion_deny_list_entries(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_import_suggestion_deny_list_entries_rest_unset_required_fields(): + transport = transports.CompletionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.import_suggestion_deny_list_entries._get_unset_required_fields({}) + ) + assert set(unset_fields) == (set(()) & set(("parent",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_import_suggestion_deny_list_entries_rest_interceptors(null_interceptor): + transport = transports.CompletionServiceRestTransport( + 
credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CompletionServiceRestInterceptor(), + ) + client = CompletionServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CompletionServiceRestInterceptor, + "post_import_suggestion_deny_list_entries", + ) as post, mock.patch.object( + transports.CompletionServiceRestInterceptor, + "pre_import_suggestion_deny_list_entries", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = import_config.ImportSuggestionDenyListEntriesRequest.pb( + import_config.ImportSuggestionDenyListEntriesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = import_config.ImportSuggestionDenyListEntriesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.import_suggestion_deny_list_entries( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_import_suggestion_deny_list_entries_rest_bad_request( + transport: str = "rest", + request_type=import_config.ImportSuggestionDenyListEntriesRequest, +): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": 
"projects/sample1/locations/sample2/collections/sample3/dataStores/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.import_suggestion_deny_list_entries(request) + + +def test_import_suggestion_deny_list_entries_rest_error(): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + purge_config.PurgeSuggestionDenyListEntriesRequest, + dict, + ], +) +def test_purge_suggestion_deny_list_entries_rest(request_type): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.purge_suggestion_deny_list_entries(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_purge_suggestion_deny_list_entries_rest_required_fields( + request_type=purge_config.PurgeSuggestionDenyListEntriesRequest, +): + transport_class = transports.CompletionServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).purge_suggestion_deny_list_entries._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).purge_suggestion_deny_list_entries._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.purge_suggestion_deny_list_entries(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_purge_suggestion_deny_list_entries_rest_unset_required_fields(): + transport = transports.CompletionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.purge_suggestion_deny_list_entries._get_unset_required_fields({}) + ) + assert set(unset_fields) == (set(()) & set(("parent",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_purge_suggestion_deny_list_entries_rest_interceptors(null_interceptor): + transport = transports.CompletionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CompletionServiceRestInterceptor(), + ) + client = CompletionServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CompletionServiceRestInterceptor, + "post_purge_suggestion_deny_list_entries", + ) as post, mock.patch.object( + 
transports.CompletionServiceRestInterceptor, + "pre_purge_suggestion_deny_list_entries", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = purge_config.PurgeSuggestionDenyListEntriesRequest.pb( + purge_config.PurgeSuggestionDenyListEntriesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = purge_config.PurgeSuggestionDenyListEntriesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.purge_suggestion_deny_list_entries( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_purge_suggestion_deny_list_entries_rest_bad_request( + transport: str = "rest", + request_type=purge_config.PurgeSuggestionDenyListEntriesRequest, +): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.purge_suggestion_deny_list_entries(request) + + +def test_purge_suggestion_deny_list_entries_rest_error(): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.CompletionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.CompletionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CompletionServiceClient( client_options={"credentials_file": "credentials.json"}, transport=transport, ) @@ -1701,6 +2461,8 @@ def test_completion_service_base_transport(): # raise NotImplementedError. 
methods = ( "complete_query", + "import_suggestion_deny_list_entries", + "purge_suggestion_deny_list_entries", "get_operation", "list_operations", ) @@ -1711,6 +2473,11 @@ def test_completion_service_base_transport(): with pytest.raises(NotImplementedError): transport.close() + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + # Catch all for all remaining methods and properties remainder = [ "kind", @@ -1898,6 +2665,23 @@ def test_completion_service_http_transport_client_cert_source_for_mtls(): mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) +def test_completion_service_rest_lro_client(): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + @pytest.mark.parametrize( "transport_name", [ @@ -1964,6 +2748,12 @@ def test_completion_service_client_transport_session_collision(transport_name): session1 = client1.transport.complete_query._session session2 = client2.transport.complete_query._session assert session1 != session2 + session1 = client1.transport.import_suggestion_deny_list_entries._session + session2 = client2.transport.import_suggestion_deny_list_entries._session + assert session1 != session2 + session1 = client1.transport.purge_suggestion_deny_list_entries._session + session2 = client2.transport.purge_suggestion_deny_list_entries._session + assert session1 != session2 def test_completion_service_grpc_transport_channel(): @@ -2092,6 +2882,40 @@ def test_completion_service_transport_channel_mtls_with_adc(transport_class): assert transport.grpc_channel == mock_grpc_channel +def test_completion_service_grpc_lro_client(): + client = CompletionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_completion_service_grpc_lro_async_client(): + client = CompletionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + def test_data_store_path(): project = "squid" location = "clam" @@ -2269,7 +3093,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" }, request, ) @@ -2299,7 +3123,7 @@ def test_get_operation_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -2332,7 +3156,7 @@ def test_list_operations_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" }, request, ) @@ -2362,7 +3186,7 @@ def test_list_operations_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_conversational_search_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_conversational_search_service.py index 043ef70b5367..57a7c8f03637 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_conversational_search_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_conversational_search_service.py @@ -3276,6 +3276,25 @@ def test_create_conversation_rest(request_type): "categories": ["categories_value1", "categories_value2"], "scores": [0.656, 0.657], }, + "summary_with_metadata": { + "summary": "summary_value", + "citation_metadata": { + "citations": [ + { + "start_index": 1189, + "end_index": 942, + "sources": [{"reference_index": 1574}], + } + ] + }, + "references": [ + { + "title": "title_value", + "document": "document_value", + "uri": "uri_value", + } + ], + }, }, }, "create_time": {"seconds": 751, "nanos": 543}, @@ -3940,6 +3959,25 @@ def test_update_conversation_rest(request_type): "categories": ["categories_value1", "categories_value2"], "scores": [0.656, 0.657], }, + "summary_with_metadata": { + "summary": "summary_value", + "citation_metadata": { + "citations": [ + { + "start_index": 1189, + "end_index": 942, + "sources": [{"reference_index": 1574}], + } + ] + }, + "references": [ + { + "title": "title_value", + "document": "document_value", + "uri": "uri_value", + } + ], + }, }, }, "create_time": {"seconds": 751, "nanos": 543}, @@ -5767,7 +5805,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" }, request, ) @@ -5797,7 +5835,7 @@ def 
test_get_operation_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -5830,7 +5868,7 @@ def test_list_operations_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" }, request, ) @@ -5860,7 +5898,7 @@ def test_list_operations_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_data_store_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_data_store_service.py new file mode 100644 index 000000000000..837b4e0c5f2f --- /dev/null +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_data_store_service.py @@ -0,0 +1,5532 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.discoveryengine_v1beta.services.data_store_service import ( + DataStoreServiceAsyncClient, + DataStoreServiceClient, + pagers, + transports, +) +from 
google.cloud.discoveryengine_v1beta.types import data_store as gcd_data_store +from google.cloud.discoveryengine_v1beta.types import common +from google.cloud.discoveryengine_v1beta.types import data_store +from google.cloud.discoveryengine_v1beta.types import data_store_service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DataStoreServiceClient._get_default_mtls_endpoint(None) is None + assert ( + DataStoreServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + DataStoreServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + DataStoreServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DataStoreServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + 
DataStoreServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert DataStoreServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert DataStoreServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert DataStoreServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + DataStoreServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert DataStoreServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert DataStoreServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert DataStoreServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + DataStoreServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert DataStoreServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def 
test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert DataStoreServiceClient._get_client_cert_source(None, False) is None + assert ( + DataStoreServiceClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + DataStoreServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + DataStoreServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + DataStoreServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + DataStoreServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DataStoreServiceClient), +) +@mock.patch.object( + DataStoreServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DataStoreServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = DataStoreServiceClient._DEFAULT_UNIVERSE + default_endpoint = DataStoreServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = DataStoreServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + DataStoreServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + DataStoreServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == DataStoreServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + 
DataStoreServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + DataStoreServiceClient._get_api_endpoint(None, None, default_universe, "always") + == DataStoreServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + DataStoreServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == DataStoreServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + DataStoreServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + DataStoreServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + DataStoreServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + DataStoreServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + DataStoreServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + DataStoreServiceClient._get_universe_domain(None, None) + == DataStoreServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + DataStoreServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DataStoreServiceClient, transports.DataStoreServiceGrpcTransport, "grpc"), + (DataStoreServiceClient, transports.DataStoreServiceRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. 
+ client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (DataStoreServiceClient, "grpc"), + (DataStoreServiceAsyncClient, "grpc_asyncio"), + (DataStoreServiceClient, "rest"), + ], +) +def test_data_store_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.DataStoreServiceGrpcTransport, "grpc"), + (transports.DataStoreServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.DataStoreServiceRestTransport, "rest"), + ], +) +def test_data_store_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + 
+@pytest.mark.parametrize( + "client_class,transport_name", + [ + (DataStoreServiceClient, "grpc"), + (DataStoreServiceAsyncClient, "grpc_asyncio"), + (DataStoreServiceClient, "rest"), + ], +) +def test_data_store_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +def test_data_store_service_client_get_transport_class(): + transport = DataStoreServiceClient.get_transport_class() + available_transports = [ + transports.DataStoreServiceGrpcTransport, + transports.DataStoreServiceRestTransport, + ] + assert transport in available_transports + + transport = DataStoreServiceClient.get_transport_class("grpc") + assert transport == transports.DataStoreServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DataStoreServiceClient, transports.DataStoreServiceGrpcTransport, "grpc"), + ( + DataStoreServiceAsyncClient, + transports.DataStoreServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (DataStoreServiceClient, transports.DataStoreServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + DataStoreServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DataStoreServiceClient), +) +@mock.patch.object( + 
DataStoreServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DataStoreServiceAsyncClient), +) +def test_data_store_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(DataStoreServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DataStoreServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + DataStoreServiceClient, + transports.DataStoreServiceGrpcTransport, + "grpc", + "true", + ), + ( + DataStoreServiceAsyncClient, + transports.DataStoreServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( 
+ DataStoreServiceClient, + transports.DataStoreServiceGrpcTransport, + "grpc", + "false", + ), + ( + DataStoreServiceAsyncClient, + transports.DataStoreServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + DataStoreServiceClient, + transports.DataStoreServiceRestTransport, + "rest", + "true", + ), + ( + DataStoreServiceClient, + transports.DataStoreServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + DataStoreServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DataStoreServiceClient), +) +@mock.patch.object( + DataStoreServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DataStoreServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_data_store_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [DataStoreServiceClient, DataStoreServiceAsyncClient] +) +@mock.patch.object( + DataStoreServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DataStoreServiceClient), +) +@mock.patch.object( + DataStoreServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DataStoreServiceAsyncClient), +) +def test_data_store_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [DataStoreServiceClient, DataStoreServiceAsyncClient] +) +@mock.patch.object( + DataStoreServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DataStoreServiceClient), +) +@mock.patch.object( + DataStoreServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DataStoreServiceAsyncClient), +) +def test_data_store_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = DataStoreServiceClient._DEFAULT_UNIVERSE + default_endpoint = DataStoreServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = DataStoreServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DataStoreServiceClient, transports.DataStoreServiceGrpcTransport, "grpc"), + ( + DataStoreServiceAsyncClient, + transports.DataStoreServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (DataStoreServiceClient, transports.DataStoreServiceRestTransport, "rest"), + ], +) +def test_data_store_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + DataStoreServiceClient, + transports.DataStoreServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + DataStoreServiceAsyncClient, + transports.DataStoreServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + DataStoreServiceClient, + transports.DataStoreServiceRestTransport, + "rest", + None, + ), + ], +) +def test_data_store_service_client_client_options_credentials_file( + client_class, 
transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_data_store_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.discoveryengine_v1beta.services.data_store_service.transports.DataStoreServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = DataStoreServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + DataStoreServiceClient, + transports.DataStoreServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + DataStoreServiceAsyncClient, + transports.DataStoreServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_data_store_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + data_store_service.CreateDataStoreRequest, + dict, + ], +) +def test_create_data_store(request_type, transport: str = "grpc"): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + 
# and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_store), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_data_store(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == data_store_service.CreateDataStoreRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_data_store_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_store), "__call__" + ) as call: + client.create_data_store() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_store_service.CreateDataStoreRequest() + + +@pytest.mark.asyncio +async def test_create_data_store_async( + transport: str = "grpc_asyncio", + request_type=data_store_service.CreateDataStoreRequest, +): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_data_store), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_data_store(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == data_store_service.CreateDataStoreRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_data_store_async_from_dict(): + await test_create_data_store_async(request_type=dict) + + +def test_create_data_store_field_headers(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_store_service.CreateDataStoreRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_store), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_data_store(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_data_store_field_headers_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = data_store_service.CreateDataStoreRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_store), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_data_store(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_data_store_flattened(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_store), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_data_store( + parent="parent_value", + data_store=gcd_data_store.DataStore(name="name_value"), + data_store_id="data_store_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].data_store + mock_val = gcd_data_store.DataStore(name="name_value") + assert arg == mock_val + arg = args[0].data_store_id + mock_val = "data_store_id_value" + assert arg == mock_val + + +def test_create_data_store_flattened_error(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_data_store( + data_store_service.CreateDataStoreRequest(), + parent="parent_value", + data_store=gcd_data_store.DataStore(name="name_value"), + data_store_id="data_store_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_data_store_flattened_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_store), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_data_store( + parent="parent_value", + data_store=gcd_data_store.DataStore(name="name_value"), + data_store_id="data_store_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].data_store + mock_val = gcd_data_store.DataStore(name="name_value") + assert arg == mock_val + arg = args[0].data_store_id + mock_val = "data_store_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_data_store_flattened_error_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_data_store( + data_store_service.CreateDataStoreRequest(), + parent="parent_value", + data_store=gcd_data_store.DataStore(name="name_value"), + data_store_id="data_store_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + data_store_service.GetDataStoreRequest, + dict, + ], +) +def test_get_data_store(request_type, transport: str = "grpc"): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_store), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = data_store.DataStore( + name="name_value", + display_name="display_name_value", + industry_vertical=common.IndustryVertical.GENERIC, + solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], + default_schema_id="default_schema_id_value", + content_config=data_store.DataStore.ContentConfig.NO_CONTENT, + ) + response = client.get_data_store(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == data_store_service.GetDataStoreRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, data_store.DataStore) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.industry_vertical == common.IndustryVertical.GENERIC + assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] + assert response.default_schema_id == "default_schema_id_value" + assert response.content_config == data_store.DataStore.ContentConfig.NO_CONTENT + + +def test_get_data_store_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_store), "__call__") as call: + client.get_data_store() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_store_service.GetDataStoreRequest() + + +@pytest.mark.asyncio +async def test_get_data_store_async( + transport: str = "grpc_asyncio", request_type=data_store_service.GetDataStoreRequest +): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_store), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_store.DataStore( + name="name_value", + display_name="display_name_value", + industry_vertical=common.IndustryVertical.GENERIC, + solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], + default_schema_id="default_schema_id_value", + content_config=data_store.DataStore.ContentConfig.NO_CONTENT, + ) + ) + response = await client.get_data_store(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == data_store_service.GetDataStoreRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, data_store.DataStore) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.industry_vertical == common.IndustryVertical.GENERIC + assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] + assert response.default_schema_id == "default_schema_id_value" + assert response.content_config == data_store.DataStore.ContentConfig.NO_CONTENT + + +@pytest.mark.asyncio +async def test_get_data_store_async_from_dict(): + await test_get_data_store_async(request_type=dict) + + +def test_get_data_store_field_headers(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_store_service.GetDataStoreRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_store), "__call__") as call: + call.return_value = data_store.DataStore() + client.get_data_store(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_data_store_field_headers_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_store_service.GetDataStoreRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_store), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_store.DataStore() + ) + await client.get_data_store(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_data_store_flattened(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_store), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = data_store.DataStore() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_data_store( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_data_store_flattened_error(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_data_store( + data_store_service.GetDataStoreRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_data_store_flattened_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_store), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = data_store.DataStore() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_store.DataStore() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_data_store( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_data_store_flattened_error_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_data_store( + data_store_service.GetDataStoreRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + data_store_service.ListDataStoresRequest, + dict, + ], +) +def test_list_data_stores(request_type, transport: str = "grpc"): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_data_stores), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = data_store_service.ListDataStoresResponse( + next_page_token="next_page_token_value", + ) + response = client.list_data_stores(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == data_store_service.ListDataStoresRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDataStoresPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_data_stores_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_data_stores), "__call__") as call: + client.list_data_stores() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_store_service.ListDataStoresRequest() + + +@pytest.mark.asyncio +async def test_list_data_stores_async( + transport: str = "grpc_asyncio", + request_type=data_store_service.ListDataStoresRequest, +): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_data_stores), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_store_service.ListDataStoresResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_data_stores(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == data_store_service.ListDataStoresRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDataStoresAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_data_stores_async_from_dict(): + await test_list_data_stores_async(request_type=dict) + + +def test_list_data_stores_field_headers(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = data_store_service.ListDataStoresRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_data_stores), "__call__") as call: + call.return_value = data_store_service.ListDataStoresResponse() + client.list_data_stores(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_data_stores_field_headers_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_store_service.ListDataStoresRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_data_stores), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_store_service.ListDataStoresResponse() + ) + await client.list_data_stores(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_data_stores_flattened(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_data_stores), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = data_store_service.ListDataStoresResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_data_stores( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_data_stores_flattened_error(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_data_stores( + data_store_service.ListDataStoresRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_data_stores_flattened_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_data_stores), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = data_store_service.ListDataStoresResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_store_service.ListDataStoresResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_data_stores( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_data_stores_flattened_error_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_data_stores( + data_store_service.ListDataStoresRequest(), + parent="parent_value", + ) + + +def test_list_data_stores_pager(transport_name: str = "grpc"): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_data_stores), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + data_store_service.ListDataStoresResponse( + data_stores=[ + data_store.DataStore(), + data_store.DataStore(), + data_store.DataStore(), + ], + next_page_token="abc", + ), + data_store_service.ListDataStoresResponse( + data_stores=[], + next_page_token="def", + ), + data_store_service.ListDataStoresResponse( + data_stores=[ + data_store.DataStore(), + ], + next_page_token="ghi", + ), + data_store_service.ListDataStoresResponse( + data_stores=[ + data_store.DataStore(), + data_store.DataStore(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_data_stores(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, data_store.DataStore) for i in results) + + +def test_list_data_stores_pages(transport_name: str = "grpc"): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_data_stores), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + data_store_service.ListDataStoresResponse( + data_stores=[ + data_store.DataStore(), + data_store.DataStore(), + data_store.DataStore(), + ], + next_page_token="abc", + ), + data_store_service.ListDataStoresResponse( + data_stores=[], + next_page_token="def", + ), + data_store_service.ListDataStoresResponse( + data_stores=[ + data_store.DataStore(), + ], + next_page_token="ghi", + ), + data_store_service.ListDataStoresResponse( + data_stores=[ + data_store.DataStore(), + data_store.DataStore(), + ], + ), + RuntimeError, + ) + pages = list(client.list_data_stores(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_data_stores_async_pager(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_stores), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + data_store_service.ListDataStoresResponse( + data_stores=[ + data_store.DataStore(), + data_store.DataStore(), + data_store.DataStore(), + ], + next_page_token="abc", + ), + data_store_service.ListDataStoresResponse( + data_stores=[], + next_page_token="def", + ), + data_store_service.ListDataStoresResponse( + data_stores=[ + data_store.DataStore(), + ], + next_page_token="ghi", + ), + data_store_service.ListDataStoresResponse( + data_stores=[ + data_store.DataStore(), + data_store.DataStore(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_data_stores( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, data_store.DataStore) for i in responses) + + +@pytest.mark.asyncio +async def test_list_data_stores_async_pages(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_stores), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + data_store_service.ListDataStoresResponse( + data_stores=[ + data_store.DataStore(), + data_store.DataStore(), + data_store.DataStore(), + ], + next_page_token="abc", + ), + data_store_service.ListDataStoresResponse( + data_stores=[], + next_page_token="def", + ), + data_store_service.ListDataStoresResponse( + data_stores=[ + data_store.DataStore(), + ], + next_page_token="ghi", + ), + data_store_service.ListDataStoresResponse( + data_stores=[ + data_store.DataStore(), + data_store.DataStore(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_data_stores(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + data_store_service.DeleteDataStoreRequest, + dict, + ], +) +def test_delete_data_store(request_type, transport: str = "grpc"): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_store), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_data_store(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == data_store_service.DeleteDataStoreRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_data_store_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_store), "__call__" + ) as call: + client.delete_data_store() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_store_service.DeleteDataStoreRequest() + + +@pytest.mark.asyncio +async def test_delete_data_store_async( + transport: str = "grpc_asyncio", + request_type=data_store_service.DeleteDataStoreRequest, +): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_store), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_data_store(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == data_store_service.DeleteDataStoreRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_data_store_async_from_dict(): + await test_delete_data_store_async(request_type=dict) + + +def test_delete_data_store_field_headers(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_store_service.DeleteDataStoreRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_store), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_data_store(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_data_store_field_headers_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_store_service.DeleteDataStoreRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_store), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_data_store(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_data_store_flattened(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_store), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_data_store( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_data_store_flattened_error(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_data_store( + data_store_service.DeleteDataStoreRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_data_store_flattened_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_store), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_data_store( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_data_store_flattened_error_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_data_store( + data_store_service.DeleteDataStoreRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + data_store_service.UpdateDataStoreRequest, + dict, + ], +) +def test_update_data_store(request_type, transport: str = "grpc"): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_store), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = gcd_data_store.DataStore( + name="name_value", + display_name="display_name_value", + industry_vertical=common.IndustryVertical.GENERIC, + solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], + default_schema_id="default_schema_id_value", + content_config=gcd_data_store.DataStore.ContentConfig.NO_CONTENT, + ) + response = client.update_data_store(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == data_store_service.UpdateDataStoreRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gcd_data_store.DataStore) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.industry_vertical == common.IndustryVertical.GENERIC + assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] + assert response.default_schema_id == "default_schema_id_value" + assert response.content_config == gcd_data_store.DataStore.ContentConfig.NO_CONTENT + + +def test_update_data_store_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_data_store), "__call__" + ) as call: + client.update_data_store() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_store_service.UpdateDataStoreRequest() + + +@pytest.mark.asyncio +async def test_update_data_store_async( + transport: str = "grpc_asyncio", + request_type=data_store_service.UpdateDataStoreRequest, +): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_store), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_data_store.DataStore( + name="name_value", + display_name="display_name_value", + industry_vertical=common.IndustryVertical.GENERIC, + solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], + default_schema_id="default_schema_id_value", + content_config=gcd_data_store.DataStore.ContentConfig.NO_CONTENT, + ) + ) + response = await client.update_data_store(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == data_store_service.UpdateDataStoreRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcd_data_store.DataStore) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.industry_vertical == common.IndustryVertical.GENERIC + assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] + assert response.default_schema_id == "default_schema_id_value" + assert response.content_config == gcd_data_store.DataStore.ContentConfig.NO_CONTENT + + +@pytest.mark.asyncio +async def test_update_data_store_async_from_dict(): + await test_update_data_store_async(request_type=dict) + + +def test_update_data_store_field_headers(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_store_service.UpdateDataStoreRequest() + + request.data_store.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_store), "__call__" + ) as call: + call.return_value = gcd_data_store.DataStore() + client.update_data_store(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_store.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_data_store_field_headers_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = data_store_service.UpdateDataStoreRequest() + + request.data_store.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_store), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_data_store.DataStore() + ) + await client.update_data_store(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_store.name=name_value", + ) in kw["metadata"] + + +def test_update_data_store_flattened(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_store), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_data_store.DataStore() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_data_store( + data_store=gcd_data_store.DataStore(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].data_store + mock_val = gcd_data_store.DataStore(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_data_store_flattened_error(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_data_store( + data_store_service.UpdateDataStoreRequest(), + data_store=gcd_data_store.DataStore(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_data_store_flattened_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_store), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_data_store.DataStore() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_data_store.DataStore() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_data_store( + data_store=gcd_data_store.DataStore(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].data_store + mock_val = gcd_data_store.DataStore(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_data_store_flattened_error_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_data_store( + data_store_service.UpdateDataStoreRequest(), + data_store=gcd_data_store.DataStore(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + data_store_service.CreateDataStoreRequest, + dict, + ], +) +def test_create_data_store_rest(request_type): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["data_store"] = { + "name": "name_value", + "display_name": "display_name_value", + "industry_vertical": 1, + "solution_types": [1], + "default_schema_id": "default_schema_id_value", + "content_config": 1, + "create_time": {"seconds": 751, "nanos": 543}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = data_store_service.CreateDataStoreRequest.meta.fields["data_store"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["data_store"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the 
dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["data_store"][field])): + del request_init["data_store"][field][i][subfield] + else: + del request_init["data_store"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_data_store(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_data_store_rest_required_fields( + request_type=data_store_service.CreateDataStoreRequest, +): + transport_class = transports.DataStoreServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["data_store_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "dataStoreId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_data_store._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "dataStoreId" in jsonified_request + assert jsonified_request["dataStoreId"] == request_init["data_store_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["dataStoreId"] = "data_store_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_data_store._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "create_advanced_site_search", + "data_store_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "dataStoreId" in jsonified_request + assert jsonified_request["dataStoreId"] == "data_store_id_value" + + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_data_store(request) + + expected_params = [ + ( + "dataStoreId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_data_store_rest_unset_required_fields(): + transport = transports.DataStoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_data_store._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "createAdvancedSiteSearch", + "dataStoreId", + ) + ) + & set( + ( + "parent", + "dataStore", + "dataStoreId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_data_store_rest_interceptors(null_interceptor): + transport = transports.DataStoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataStoreServiceRestInterceptor(), + ) + client = DataStoreServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DataStoreServiceRestInterceptor, "post_create_data_store" + ) as post, mock.patch.object( + transports.DataStoreServiceRestInterceptor, "pre_create_data_store" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + 
pb_message = data_store_service.CreateDataStoreRequest.pb( + data_store_service.CreateDataStoreRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = data_store_service.CreateDataStoreRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_data_store( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_data_store_rest_bad_request( + transport: str = "rest", request_type=data_store_service.CreateDataStoreRequest +): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_data_store(request) + + +def test_create_data_store_rest_flattened(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + data_store=gcd_data_store.DataStore(name="name_value"), + data_store_id="data_store_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_data_store(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*}/dataStores" + % client.transport._host, + args[1], + ) + + +def test_create_data_store_rest_flattened_error(transport: str = "rest"): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_data_store( + data_store_service.CreateDataStoreRequest(), + parent="parent_value", + data_store=gcd_data_store.DataStore(name="name_value"), + data_store_id="data_store_id_value", + ) + + +def test_create_data_store_rest_error(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + data_store_service.GetDataStoreRequest, + dict, + ], +) +def test_get_data_store_rest(request_type): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dataStores/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = data_store.DataStore( + name="name_value", + display_name="display_name_value", + industry_vertical=common.IndustryVertical.GENERIC, + solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], + default_schema_id="default_schema_id_value", + content_config=data_store.DataStore.ContentConfig.NO_CONTENT, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = data_store.DataStore.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_data_store(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, data_store.DataStore) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.industry_vertical == common.IndustryVertical.GENERIC + assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] + assert response.default_schema_id == "default_schema_id_value" + assert response.content_config == data_store.DataStore.ContentConfig.NO_CONTENT + + +def test_get_data_store_rest_required_fields( + request_type=data_store_service.GetDataStoreRequest, +): + transport_class = transports.DataStoreServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_data_store._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_data_store._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = data_store.DataStore() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_store.DataStore.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_data_store(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_data_store_rest_unset_required_fields(): + transport = transports.DataStoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_data_store._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_data_store_rest_interceptors(null_interceptor): + transport = transports.DataStoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataStoreServiceRestInterceptor(), + ) + client = DataStoreServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + 
transports.DataStoreServiceRestInterceptor, "post_get_data_store" + ) as post, mock.patch.object( + transports.DataStoreServiceRestInterceptor, "pre_get_data_store" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = data_store_service.GetDataStoreRequest.pb( + data_store_service.GetDataStoreRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = data_store.DataStore.to_json(data_store.DataStore()) + + request = data_store_service.GetDataStoreRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = data_store.DataStore() + + client.get_data_store( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_data_store_rest_bad_request( + transport: str = "rest", request_type=data_store_service.GetDataStoreRequest +): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dataStores/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_data_store(request) + + +def test_get_data_store_rest_flattened(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = data_store.DataStore() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = data_store.DataStore.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_data_store(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/dataStores/*}" + % client.transport._host, + args[1], + ) + + +def test_get_data_store_rest_flattened_error(transport: str = "rest"): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_data_store( + data_store_service.GetDataStoreRequest(), + name="name_value", + ) + + +def test_get_data_store_rest_error(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + data_store_service.ListDataStoresRequest, + dict, + ], +) +def test_list_data_stores_rest(request_type): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = data_store_service.ListDataStoresResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = data_store_service.ListDataStoresResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_data_stores(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDataStoresPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_data_stores_rest_required_fields( + request_type=data_store_service.ListDataStoresRequest, +): + transport_class = transports.DataStoreServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_data_stores._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_data_stores._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = data_store_service.ListDataStoresResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_store_service.ListDataStoresResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_data_stores(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_data_stores_rest_unset_required_fields(): + transport = transports.DataStoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_data_stores._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_data_stores_rest_interceptors(null_interceptor): + transport = transports.DataStoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataStoreServiceRestInterceptor(), + ) + client = DataStoreServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DataStoreServiceRestInterceptor, "post_list_data_stores" + ) as post, mock.patch.object( + transports.DataStoreServiceRestInterceptor, "pre_list_data_stores" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = data_store_service.ListDataStoresRequest.pb( + 
data_store_service.ListDataStoresRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = data_store_service.ListDataStoresResponse.to_json( + data_store_service.ListDataStoresResponse() + ) + + request = data_store_service.ListDataStoresRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = data_store_service.ListDataStoresResponse() + + client.list_data_stores( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_data_stores_rest_bad_request( + transport: str = "rest", request_type=data_store_service.ListDataStoresRequest +): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_data_stores(request) + + +def test_list_data_stores_rest_flattened(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = data_store_service.ListDataStoresResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = data_store_service.ListDataStoresResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_data_stores(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*}/dataStores" + % client.transport._host, + args[1], + ) + + +def test_list_data_stores_rest_flattened_error(transport: str = "rest"): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_data_stores( + data_store_service.ListDataStoresRequest(), + parent="parent_value", + ) + + +def test_list_data_stores_rest_pager(transport: str = "rest"): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + data_store_service.ListDataStoresResponse( + data_stores=[ + data_store.DataStore(), + data_store.DataStore(), + data_store.DataStore(), + ], + next_page_token="abc", + ), + data_store_service.ListDataStoresResponse( + data_stores=[], + next_page_token="def", + ), + data_store_service.ListDataStoresResponse( + data_stores=[ + data_store.DataStore(), + ], + next_page_token="ghi", + ), + data_store_service.ListDataStoresResponse( + data_stores=[ + data_store.DataStore(), + data_store.DataStore(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + data_store_service.ListDataStoresResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_data_stores(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, data_store.DataStore) for i in results) + + pages = list(client.list_data_stores(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + data_store_service.DeleteDataStoreRequest, + dict, + ], +) +def test_delete_data_store_rest(request_type): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dataStores/sample3"} + request = request_type(**request_init) + + # Mock the http request 
call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_data_store(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_data_store_rest_required_fields( + request_type=data_store_service.DeleteDataStoreRequest, +): + transport_class = transports.DataStoreServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_data_store._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_data_store._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = 
request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_data_store(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_data_store_rest_unset_required_fields(): + transport = transports.DataStoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_data_store._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_data_store_rest_interceptors(null_interceptor): + transport = transports.DataStoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataStoreServiceRestInterceptor(), + ) + client = DataStoreServiceClient(transport=transport) + with mock.patch.object( + 
type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DataStoreServiceRestInterceptor, "post_delete_data_store" + ) as post, mock.patch.object( + transports.DataStoreServiceRestInterceptor, "pre_delete_data_store" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = data_store_service.DeleteDataStoreRequest.pb( + data_store_service.DeleteDataStoreRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = data_store_service.DeleteDataStoreRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_data_store( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_data_store_rest_bad_request( + transport: str = "rest", request_type=data_store_service.DeleteDataStoreRequest +): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dataStores/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_data_store(request) + + +def test_delete_data_store_rest_flattened(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_data_store(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/dataStores/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_data_store_rest_flattened_error(transport: str = "rest"): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_data_store( + data_store_service.DeleteDataStoreRequest(), + name="name_value", + ) + + +def test_delete_data_store_rest_error(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + data_store_service.UpdateDataStoreRequest, + dict, + ], +) +def test_update_data_store_rest(request_type): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "data_store": {"name": "projects/sample1/locations/sample2/dataStores/sample3"} + } + request_init["data_store"] = { + "name": "projects/sample1/locations/sample2/dataStores/sample3", + "display_name": "display_name_value", + "industry_vertical": 1, + "solution_types": [1], + "default_schema_id": "default_schema_id_value", + "content_config": 1, + "create_time": {"seconds": 751, "nanos": 543}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = data_store_service.UpdateDataStoreRequest.meta.fields["data_store"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["data_store"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["data_store"][field])): + del request_init["data_store"][field][i][subfield] + else: + del 
request_init["data_store"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcd_data_store.DataStore( + name="name_value", + display_name="display_name_value", + industry_vertical=common.IndustryVertical.GENERIC, + solution_types=[common.SolutionType.SOLUTION_TYPE_RECOMMENDATION], + default_schema_id="default_schema_id_value", + content_config=gcd_data_store.DataStore.ContentConfig.NO_CONTENT, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcd_data_store.DataStore.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_data_store(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcd_data_store.DataStore) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.industry_vertical == common.IndustryVertical.GENERIC + assert response.solution_types == [common.SolutionType.SOLUTION_TYPE_RECOMMENDATION] + assert response.default_schema_id == "default_schema_id_value" + assert response.content_config == gcd_data_store.DataStore.ContentConfig.NO_CONTENT + + +def test_update_data_store_rest_required_fields( + request_type=data_store_service.UpdateDataStoreRequest, +): + transport_class = transports.DataStoreServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_data_store._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_data_store._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcd_data_store.DataStore() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gcd_data_store.DataStore.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_data_store(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_data_store_rest_unset_required_fields(): + transport = transports.DataStoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_data_store._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("dataStore",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_data_store_rest_interceptors(null_interceptor): + transport = transports.DataStoreServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataStoreServiceRestInterceptor(), + ) + client = DataStoreServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + 
path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DataStoreServiceRestInterceptor, "post_update_data_store" + ) as post, mock.patch.object( + transports.DataStoreServiceRestInterceptor, "pre_update_data_store" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = data_store_service.UpdateDataStoreRequest.pb( + data_store_service.UpdateDataStoreRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gcd_data_store.DataStore.to_json( + gcd_data_store.DataStore() + ) + + request = data_store_service.UpdateDataStoreRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gcd_data_store.DataStore() + + client.update_data_store( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_data_store_rest_bad_request( + transport: str = "rest", request_type=data_store_service.UpdateDataStoreRequest +): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "data_store": {"name": "projects/sample1/locations/sample2/dataStores/sample3"} + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_data_store(request) + + +def test_update_data_store_rest_flattened(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcd_data_store.DataStore() + + # get arguments that satisfy an http rule for this method + sample_request = { + "data_store": { + "name": "projects/sample1/locations/sample2/dataStores/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + data_store=gcd_data_store.DataStore(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcd_data_store.DataStore.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_data_store(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{data_store.name=projects/*/locations/*/dataStores/*}" + % client.transport._host, + args[1], + ) + + +def test_update_data_store_rest_flattened_error(transport: str = "rest"): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_data_store( + data_store_service.UpdateDataStoreRequest(), + data_store=gcd_data_store.DataStore(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_data_store_rest_error(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DataStoreServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DataStoreServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataStoreServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.DataStoreServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataStoreServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataStoreServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DataStoreServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataStoreServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DataStoreServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DataStoreServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DataStoreServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DataStoreServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataStoreServiceGrpcTransport, + transports.DataStoreServiceGrpcAsyncIOTransport, + transports.DataStoreServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = DataStoreServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DataStoreServiceGrpcTransport, + ) + + +def test_data_store_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DataStoreServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_data_store_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.discoveryengine_v1beta.services.data_store_service.transports.DataStoreServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.DataStoreServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "create_data_store", + "get_data_store", + "list_data_stores", + "delete_data_store", + "update_data_store", + "get_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_data_store_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.discoveryengine_v1beta.services.data_store_service.transports.DataStoreServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DataStoreServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_data_store_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.discoveryengine_v1beta.services.data_store_service.transports.DataStoreServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DataStoreServiceTransport() + adc.assert_called_once() + + +def test_data_store_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DataStoreServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataStoreServiceGrpcTransport, + transports.DataStoreServiceGrpcAsyncIOTransport, + ], +) +def test_data_store_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataStoreServiceGrpcTransport, + transports.DataStoreServiceGrpcAsyncIOTransport, + transports.DataStoreServiceRestTransport, + ], +) +def test_data_store_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DataStoreServiceGrpcTransport, grpc_helpers), + (transports.DataStoreServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_data_store_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataStoreServiceGrpcTransport, + transports.DataStoreServiceGrpcAsyncIOTransport, + ], +) +def test_data_store_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_data_store_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.DataStoreServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_data_store_service_rest_lro_client(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_data_store_service_host_no_port(transport_name): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="discoveryengine.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_data_store_service_host_with_port(transport_name): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="discoveryengine.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "discoveryengine.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_data_store_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = DataStoreServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = DataStoreServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_data_store._session + session2 = client2.transport.create_data_store._session + assert session1 != session2 + session1 = client1.transport.get_data_store._session + session2 = client2.transport.get_data_store._session + assert session1 != session2 + session1 = client1.transport.list_data_stores._session + session2 = 
client2.transport.list_data_stores._session + assert session1 != session2 + session1 = client1.transport.delete_data_store._session + session2 = client2.transport.delete_data_store._session + assert session1 != session2 + session1 = client1.transport.update_data_store._session + session2 = client2.transport.update_data_store._session + assert session1 != session2 + + +def test_data_store_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.DataStoreServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_data_store_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.DataStoreServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.DataStoreServiceGrpcTransport, + transports.DataStoreServiceGrpcAsyncIOTransport, + ], +) +def test_data_store_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.DataStoreServiceGrpcTransport, + transports.DataStoreServiceGrpcAsyncIOTransport, + ], +) +def test_data_store_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_data_store_service_grpc_lro_client(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_data_store_service_grpc_lro_async_client(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. 
+ assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_collection_path(): + project = "squid" + location = "clam" + collection = "whelk" + expected = ( + "projects/{project}/locations/{location}/collections/{collection}".format( + project=project, + location=location, + collection=collection, + ) + ) + actual = DataStoreServiceClient.collection_path(project, location, collection) + assert expected == actual + + +def test_parse_collection_path(): + expected = { + "project": "octopus", + "location": "oyster", + "collection": "nudibranch", + } + path = DataStoreServiceClient.collection_path(**expected) + + # Check that the path construction is reversible. + actual = DataStoreServiceClient.parse_collection_path(path) + assert expected == actual + + +def test_data_store_path(): + project = "cuttlefish" + location = "mussel" + data_store = "winkle" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}".format( + project=project, + location=location, + data_store=data_store, + ) + actual = DataStoreServiceClient.data_store_path(project, location, data_store) + assert expected == actual + + +def test_parse_data_store_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "data_store": "abalone", + } + path = DataStoreServiceClient.data_store_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataStoreServiceClient.parse_data_store_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = DataStoreServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = DataStoreServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = DataStoreServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = DataStoreServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = DataStoreServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = DataStoreServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = DataStoreServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = DataStoreServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataStoreServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = DataStoreServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = DataStoreServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = DataStoreServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = DataStoreServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = DataStoreServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataStoreServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.DataStoreServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.DataStoreServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = DataStoreServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_get_operation(transport: str = "grpc"): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = DataStoreServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = DataStoreServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (DataStoreServiceClient, transports.DataStoreServiceGrpcTransport), + (DataStoreServiceAsyncClient, transports.DataStoreServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_document_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_document_service.py index da86daf21708..8dd83529bf91 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_document_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_document_service.py @@ -5675,7 +5675,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + "name": 
"projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" }, request, ) @@ -5705,7 +5705,7 @@ def test_get_operation_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -5738,7 +5738,7 @@ def test_list_operations_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" }, request, ) @@ -5768,7 +5768,7 @@ def test_list_operations_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_engine_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_engine_service.py new file mode 100644 index 000000000000..e68de8b245ce --- /dev/null +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_engine_service.py @@ -0,0 +1,5557 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.discoveryengine_v1beta.services.engine_service import ( + EngineServiceAsyncClient, + EngineServiceClient, + pagers, + transports, +) +from google.cloud.discoveryengine_v1beta.types import 
common +from google.cloud.discoveryengine_v1beta.types import engine +from google.cloud.discoveryengine_v1beta.types import engine as gcd_engine +from google.cloud.discoveryengine_v1beta.types import engine_service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert EngineServiceClient._get_default_mtls_endpoint(None) is None + assert ( + EngineServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + EngineServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + EngineServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + EngineServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + EngineServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +def 
test__read_environment_variables(): + assert EngineServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert EngineServiceClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert EngineServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + EngineServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert EngineServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert EngineServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert EngineServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + EngineServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert EngineServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert 
EngineServiceClient._get_client_cert_source(None, False) is None + assert ( + EngineServiceClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + EngineServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + EngineServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + EngineServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + EngineServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EngineServiceClient), +) +@mock.patch.object( + EngineServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EngineServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = EngineServiceClient._DEFAULT_UNIVERSE + default_endpoint = EngineServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = EngineServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + EngineServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + EngineServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == EngineServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + EngineServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + EngineServiceClient._get_api_endpoint(None, None, default_universe, "always") + == 
EngineServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + EngineServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == EngineServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + EngineServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + EngineServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + EngineServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + EngineServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + EngineServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + EngineServiceClient._get_universe_domain(None, None) + == EngineServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + EngineServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (EngineServiceClient, transports.EngineServiceGrpcTransport, "grpc"), + (EngineServiceClient, transports.EngineServiceRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. 
+ assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. 
+ # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (EngineServiceClient, "grpc"), + (EngineServiceAsyncClient, "grpc_asyncio"), + (EngineServiceClient, "rest"), + ], +) +def test_engine_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.EngineServiceGrpcTransport, "grpc"), + 
(transports.EngineServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.EngineServiceRestTransport, "rest"), + ], +) +def test_engine_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (EngineServiceClient, "grpc"), + (EngineServiceAsyncClient, "grpc_asyncio"), + (EngineServiceClient, "rest"), + ], +) +def test_engine_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +def test_engine_service_client_get_transport_class(): + transport = EngineServiceClient.get_transport_class() + available_transports = [ + 
transports.EngineServiceGrpcTransport, + transports.EngineServiceRestTransport, + ] + assert transport in available_transports + + transport = EngineServiceClient.get_transport_class("grpc") + assert transport == transports.EngineServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (EngineServiceClient, transports.EngineServiceGrpcTransport, "grpc"), + ( + EngineServiceAsyncClient, + transports.EngineServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (EngineServiceClient, transports.EngineServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + EngineServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EngineServiceClient), +) +@mock.patch.object( + EngineServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EngineServiceAsyncClient), +) +def test_engine_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(EngineServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(EngineServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + 
always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (EngineServiceClient, transports.EngineServiceGrpcTransport, "grpc", "true"), + ( + EngineServiceAsyncClient, + transports.EngineServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (EngineServiceClient, transports.EngineServiceGrpcTransport, "grpc", "false"), + ( + EngineServiceAsyncClient, + transports.EngineServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (EngineServiceClient, transports.EngineServiceRestTransport, "rest", "true"), + (EngineServiceClient, transports.EngineServiceRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + EngineServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EngineServiceClient), +) +@mock.patch.object( + EngineServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EngineServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_engine_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [EngineServiceClient, EngineServiceAsyncClient] +) +@mock.patch.object( + EngineServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(EngineServiceClient), +) +@mock.patch.object( + EngineServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(EngineServiceAsyncClient), +) +def test_engine_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [EngineServiceClient, EngineServiceAsyncClient] +) +@mock.patch.object( + EngineServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EngineServiceClient), +) +@mock.patch.object( + EngineServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EngineServiceAsyncClient), +) +def test_engine_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = EngineServiceClient._DEFAULT_UNIVERSE + default_endpoint = EngineServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = EngineServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (EngineServiceClient, transports.EngineServiceGrpcTransport, "grpc"), + ( + EngineServiceAsyncClient, + transports.EngineServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (EngineServiceClient, transports.EngineServiceRestTransport, "rest"), + ], +) +def test_engine_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + EngineServiceClient, + transports.EngineServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + EngineServiceAsyncClient, + transports.EngineServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (EngineServiceClient, transports.EngineServiceRestTransport, "rest", None), + ], +) +def test_engine_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the 
case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_engine_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.discoveryengine_v1beta.services.engine_service.transports.EngineServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = EngineServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + EngineServiceClient, + transports.EngineServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + EngineServiceAsyncClient, + transports.EngineServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_engine_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + engine_service.CreateEngineRequest, + dict, + ], +) +def test_create_engine(request_type, transport: str = "grpc"): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are 
mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_engine), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == engine_service.CreateEngineRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_engine_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_engine), "__call__") as call: + client.create_engine() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == engine_service.CreateEngineRequest() + + +@pytest.mark.asyncio +async def test_create_engine_async( + transport: str = "grpc_asyncio", request_type=engine_service.CreateEngineRequest +): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_engine), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == engine_service.CreateEngineRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_engine_async_from_dict(): + await test_create_engine_async(request_type=dict) + + +def test_create_engine_field_headers(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = engine_service.CreateEngineRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_engine), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_engine_field_headers_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = engine_service.CreateEngineRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_engine), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_engine_flattened(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_engine), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_engine( + parent="parent_value", + engine=gcd_engine.Engine( + chat_engine_config=gcd_engine.Engine.ChatEngineConfig( + agent_creation_config=gcd_engine.Engine.ChatEngineConfig.AgentCreationConfig( + business="business_value" + ) + ) + ), + engine_id="engine_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].engine + mock_val = gcd_engine.Engine( + chat_engine_config=gcd_engine.Engine.ChatEngineConfig( + agent_creation_config=gcd_engine.Engine.ChatEngineConfig.AgentCreationConfig( + business="business_value" + ) + ) + ) + assert arg == mock_val + arg = args[0].engine_id + mock_val = "engine_id_value" + assert arg == mock_val + + +def test_create_engine_flattened_error(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_engine( + engine_service.CreateEngineRequest(), + parent="parent_value", + engine=gcd_engine.Engine( + chat_engine_config=gcd_engine.Engine.ChatEngineConfig( + agent_creation_config=gcd_engine.Engine.ChatEngineConfig.AgentCreationConfig( + business="business_value" + ) + ) + ), + engine_id="engine_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_engine_flattened_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_engine), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_engine( + parent="parent_value", + engine=gcd_engine.Engine( + chat_engine_config=gcd_engine.Engine.ChatEngineConfig( + agent_creation_config=gcd_engine.Engine.ChatEngineConfig.AgentCreationConfig( + business="business_value" + ) + ) + ), + engine_id="engine_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].engine + mock_val = gcd_engine.Engine( + chat_engine_config=gcd_engine.Engine.ChatEngineConfig( + agent_creation_config=gcd_engine.Engine.ChatEngineConfig.AgentCreationConfig( + business="business_value" + ) + ) + ) + assert arg == mock_val + arg = args[0].engine_id + mock_val = "engine_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_engine_flattened_error_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_engine( + engine_service.CreateEngineRequest(), + parent="parent_value", + engine=gcd_engine.Engine( + chat_engine_config=gcd_engine.Engine.ChatEngineConfig( + agent_creation_config=gcd_engine.Engine.ChatEngineConfig.AgentCreationConfig( + business="business_value" + ) + ) + ), + engine_id="engine_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + engine_service.DeleteEngineRequest, + dict, + ], +) +def test_delete_engine(request_type, transport: str = "grpc"): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_engine), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == engine_service.DeleteEngineRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_engine_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_engine), "__call__") as call: + client.delete_engine() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == engine_service.DeleteEngineRequest() + + +@pytest.mark.asyncio +async def test_delete_engine_async( + transport: str = "grpc_asyncio", request_type=engine_service.DeleteEngineRequest +): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_engine), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == engine_service.DeleteEngineRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_engine_async_from_dict(): + await test_delete_engine_async(request_type=dict) + + +def test_delete_engine_field_headers(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = engine_service.DeleteEngineRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_engine), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_engine_field_headers_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = engine_service.DeleteEngineRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_engine), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_engine_flattened(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_engine), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_engine( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_engine_flattened_error(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_engine( + engine_service.DeleteEngineRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_engine_flattened_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_engine), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_engine( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_engine_flattened_error_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_engine( + engine_service.DeleteEngineRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + engine_service.UpdateEngineRequest, + dict, + ], +) +def test_update_engine(request_type, transport: str = "grpc"): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_engine), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = gcd_engine.Engine( + name="name_value", + display_name="display_name_value", + data_store_ids=["data_store_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + industry_vertical=common.IndustryVertical.GENERIC, + ) + response = client.update_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == engine_service.UpdateEngineRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gcd_engine.Engine) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.data_store_ids == ["data_store_ids_value"] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.industry_vertical == common.IndustryVertical.GENERIC + + +def test_update_engine_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_engine), "__call__") as call: + client.update_engine() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == engine_service.UpdateEngineRequest() + + +@pytest.mark.asyncio +async def test_update_engine_async( + transport: str = "grpc_asyncio", request_type=engine_service.UpdateEngineRequest +): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_engine), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_engine.Engine( + name="name_value", + display_name="display_name_value", + data_store_ids=["data_store_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + industry_vertical=common.IndustryVertical.GENERIC, + ) + ) + response = await client.update_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == engine_service.UpdateEngineRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gcd_engine.Engine) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.data_store_ids == ["data_store_ids_value"] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.industry_vertical == common.IndustryVertical.GENERIC + + +@pytest.mark.asyncio +async def test_update_engine_async_from_dict(): + await test_update_engine_async(request_type=dict) + + +def test_update_engine_field_headers(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = engine_service.UpdateEngineRequest() + + request.engine.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_engine), "__call__") as call: + call.return_value = gcd_engine.Engine() + client.update_engine(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "engine.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_engine_field_headers_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = engine_service.UpdateEngineRequest() + + request.engine.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_engine), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcd_engine.Engine()) + await client.update_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "engine.name=name_value", + ) in kw["metadata"] + + +def test_update_engine_flattened(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_engine), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_engine.Engine() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.update_engine( + engine=gcd_engine.Engine( + chat_engine_config=gcd_engine.Engine.ChatEngineConfig( + agent_creation_config=gcd_engine.Engine.ChatEngineConfig.AgentCreationConfig( + business="business_value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].engine + mock_val = gcd_engine.Engine( + chat_engine_config=gcd_engine.Engine.ChatEngineConfig( + agent_creation_config=gcd_engine.Engine.ChatEngineConfig.AgentCreationConfig( + business="business_value" + ) + ) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_engine_flattened_error(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_engine( + engine_service.UpdateEngineRequest(), + engine=gcd_engine.Engine( + chat_engine_config=gcd_engine.Engine.ChatEngineConfig( + agent_creation_config=gcd_engine.Engine.ChatEngineConfig.AgentCreationConfig( + business="business_value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_engine_flattened_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_engine), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = gcd_engine.Engine() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcd_engine.Engine()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_engine( + engine=gcd_engine.Engine( + chat_engine_config=gcd_engine.Engine.ChatEngineConfig( + agent_creation_config=gcd_engine.Engine.ChatEngineConfig.AgentCreationConfig( + business="business_value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].engine + mock_val = gcd_engine.Engine( + chat_engine_config=gcd_engine.Engine.ChatEngineConfig( + agent_creation_config=gcd_engine.Engine.ChatEngineConfig.AgentCreationConfig( + business="business_value" + ) + ) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_engine_flattened_error_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_engine( + engine_service.UpdateEngineRequest(), + engine=gcd_engine.Engine( + chat_engine_config=gcd_engine.Engine.ChatEngineConfig( + agent_creation_config=gcd_engine.Engine.ChatEngineConfig.AgentCreationConfig( + business="business_value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + engine_service.GetEngineRequest, + dict, + ], +) +def test_get_engine(request_type, transport: str = "grpc"): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_engine), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = engine.Engine( + name="name_value", + display_name="display_name_value", + data_store_ids=["data_store_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + industry_vertical=common.IndustryVertical.GENERIC, + ) + response = client.get_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == engine_service.GetEngineRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, engine.Engine) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.data_store_ids == ["data_store_ids_value"] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.industry_vertical == common.IndustryVertical.GENERIC + + +def test_get_engine_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_engine), "__call__") as call: + client.get_engine() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == engine_service.GetEngineRequest() + + +@pytest.mark.asyncio +async def test_get_engine_async( + transport: str = "grpc_asyncio", request_type=engine_service.GetEngineRequest +): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_engine), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + engine.Engine( + name="name_value", + display_name="display_name_value", + data_store_ids=["data_store_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + industry_vertical=common.IndustryVertical.GENERIC, + ) + ) + response = await client.get_engine(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == engine_service.GetEngineRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, engine.Engine) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.data_store_ids == ["data_store_ids_value"] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.industry_vertical == common.IndustryVertical.GENERIC + + +@pytest.mark.asyncio +async def test_get_engine_async_from_dict(): + await test_get_engine_async(request_type=dict) + + +def test_get_engine_field_headers(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = engine_service.GetEngineRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_engine), "__call__") as call: + call.return_value = engine.Engine() + client.get_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_engine_field_headers_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = engine_service.GetEngineRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_engine), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(engine.Engine()) + await client.get_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_engine_flattened(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_engine), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = engine.Engine() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_engine( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_engine_flattened_error(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_engine( + engine_service.GetEngineRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_engine_flattened_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_engine), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = engine.Engine() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(engine.Engine()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_engine( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_engine_flattened_error_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_engine( + engine_service.GetEngineRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + engine_service.ListEnginesRequest, + dict, + ], +) +def test_list_engines(request_type, transport: str = "grpc"): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_engines), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = engine_service.ListEnginesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_engines(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == engine_service.ListEnginesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListEnginesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_engines_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_engines), "__call__") as call: + client.list_engines() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == engine_service.ListEnginesRequest() + + +@pytest.mark.asyncio +async def test_list_engines_async( + transport: str = "grpc_asyncio", request_type=engine_service.ListEnginesRequest +): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_engines), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + engine_service.ListEnginesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_engines(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == engine_service.ListEnginesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListEnginesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_engines_async_from_dict(): + await test_list_engines_async(request_type=dict) + + +def test_list_engines_field_headers(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = engine_service.ListEnginesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_engines), "__call__") as call: + call.return_value = engine_service.ListEnginesResponse() + client.list_engines(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_engines_field_headers_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = engine_service.ListEnginesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_engines), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + engine_service.ListEnginesResponse() + ) + await client.list_engines(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_engines_flattened(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_engines), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = engine_service.ListEnginesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_engines( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_engines_flattened_error(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_engines( + engine_service.ListEnginesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_engines_flattened_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_engines), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = engine_service.ListEnginesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + engine_service.ListEnginesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_engines( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_engines_flattened_error_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_engines( + engine_service.ListEnginesRequest(), + parent="parent_value", + ) + + +def test_list_engines_pager(transport_name: str = "grpc"): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_engines), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + engine_service.ListEnginesResponse( + engines=[ + engine.Engine(), + engine.Engine(), + engine.Engine(), + ], + next_page_token="abc", + ), + engine_service.ListEnginesResponse( + engines=[], + next_page_token="def", + ), + engine_service.ListEnginesResponse( + engines=[ + engine.Engine(), + ], + next_page_token="ghi", + ), + engine_service.ListEnginesResponse( + engines=[ + engine.Engine(), + engine.Engine(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_engines(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, engine.Engine) for i in results) + + +def test_list_engines_pages(transport_name: str = "grpc"): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_engines), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + engine_service.ListEnginesResponse( + engines=[ + engine.Engine(), + engine.Engine(), + engine.Engine(), + ], + next_page_token="abc", + ), + engine_service.ListEnginesResponse( + engines=[], + next_page_token="def", + ), + engine_service.ListEnginesResponse( + engines=[ + engine.Engine(), + ], + next_page_token="ghi", + ), + engine_service.ListEnginesResponse( + engines=[ + engine.Engine(), + engine.Engine(), + ], + ), + RuntimeError, + ) + pages = list(client.list_engines(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_engines_async_pager(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_engines), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + engine_service.ListEnginesResponse( + engines=[ + engine.Engine(), + engine.Engine(), + engine.Engine(), + ], + next_page_token="abc", + ), + engine_service.ListEnginesResponse( + engines=[], + next_page_token="def", + ), + engine_service.ListEnginesResponse( + engines=[ + engine.Engine(), + ], + next_page_token="ghi", + ), + engine_service.ListEnginesResponse( + engines=[ + engine.Engine(), + engine.Engine(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_engines( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, engine.Engine) for i in responses) + + +@pytest.mark.asyncio +async def test_list_engines_async_pages(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_engines), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + engine_service.ListEnginesResponse( + engines=[ + engine.Engine(), + engine.Engine(), + engine.Engine(), + ], + next_page_token="abc", + ), + engine_service.ListEnginesResponse( + engines=[], + next_page_token="def", + ), + engine_service.ListEnginesResponse( + engines=[ + engine.Engine(), + ], + next_page_token="ghi", + ), + engine_service.ListEnginesResponse( + engines=[ + engine.Engine(), + engine.Engine(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_engines(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + engine_service.CreateEngineRequest, + dict, + ], +) +def test_create_engine_rest(request_type): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/collections/sample3"} + request_init["engine"] = { + "chat_engine_config": { + "agent_creation_config": { + "business": "business_value", + "default_language_code": "default_language_code_value", + "time_zone": "time_zone_value", + "location": "location_value", + }, + "dialogflow_agent_to_link": "dialogflow_agent_to_link_value", + }, + "search_engine_config": {"search_tier": 1, "search_add_ons": [1]}, + "chat_engine_metadata": {"dialogflow_agent": "dialogflow_agent_value"}, + "name": "name_value", + "display_name": "display_name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "data_store_ids": ["data_store_ids_value1", "data_store_ids_value2"], + "solution_type": 1, + 
"industry_vertical": 1, + "common_config": {"company_name": "company_name_value"}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = engine_service.CreateEngineRequest.meta.fields["engine"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["engine"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( 
+ { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["engine"][field])): + del request_init["engine"][field][i][subfield] + else: + del request_init["engine"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_engine(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_engine_rest_required_fields( + request_type=engine_service.CreateEngineRequest, +): + transport_class = transports.EngineServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["engine_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "engineId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_engine._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "engineId" in jsonified_request + assert jsonified_request["engineId"] == request_init["engine_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["engineId"] = "engine_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_engine._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("engine_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "engineId" in jsonified_request + assert jsonified_request["engineId"] == "engine_id_value" + + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_engine(request) + + expected_params = [ + ( + "engineId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_engine_rest_unset_required_fields(): + transport = transports.EngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_engine._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("engineId",)) + & set( + ( + "parent", + "engine", + "engineId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_engine_rest_interceptors(null_interceptor): + transport = transports.EngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.EngineServiceRestInterceptor(), + ) + client = EngineServiceClient(transport=transport) + with mock.patch.object( + 
type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.EngineServiceRestInterceptor, "post_create_engine" + ) as post, mock.patch.object( + transports.EngineServiceRestInterceptor, "pre_create_engine" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = engine_service.CreateEngineRequest.pb( + engine_service.CreateEngineRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = engine_service.CreateEngineRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_engine( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_engine_rest_bad_request( + transport: str = "rest", request_type=engine_service.CreateEngineRequest +): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/collections/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_engine(request) + + +def test_create_engine_rest_flattened(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/collections/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + engine=gcd_engine.Engine( + chat_engine_config=gcd_engine.Engine.ChatEngineConfig( + agent_creation_config=gcd_engine.Engine.ChatEngineConfig.AgentCreationConfig( + business="business_value" + ) + ) + ), + engine_id="engine_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_engine(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*/collections/*}/engines" + % client.transport._host, + args[1], + ) + + +def test_create_engine_rest_flattened_error(transport: str = "rest"): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_engine( + engine_service.CreateEngineRequest(), + parent="parent_value", + engine=gcd_engine.Engine( + chat_engine_config=gcd_engine.Engine.ChatEngineConfig( + agent_creation_config=gcd_engine.Engine.ChatEngineConfig.AgentCreationConfig( + business="business_value" + ) + ) + ), + engine_id="engine_id_value", + ) + + +def test_create_engine_rest_error(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + engine_service.DeleteEngineRequest, + dict, + ], +) +def test_delete_engine_rest(request_type): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/engines/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_engine(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_engine_rest_required_fields( + request_type=engine_service.DeleteEngineRequest, +): + transport_class = transports.EngineServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_engine._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_engine._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_engine(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_engine_rest_unset_required_fields(): + transport = transports.EngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_engine._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_engine_rest_interceptors(null_interceptor): + transport = transports.EngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.EngineServiceRestInterceptor(), + ) + client = EngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, 
mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.EngineServiceRestInterceptor, "post_delete_engine" + ) as post, mock.patch.object( + transports.EngineServiceRestInterceptor, "pre_delete_engine" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = engine_service.DeleteEngineRequest.pb( + engine_service.DeleteEngineRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = engine_service.DeleteEngineRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_engine( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_engine_rest_bad_request( + transport: str = "rest", request_type=engine_service.DeleteEngineRequest +): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/engines/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_engine(request) + + +def test_delete_engine_rest_flattened(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/collections/sample3/engines/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_engine(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/collections/*/engines/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_engine_rest_flattened_error(transport: str = "rest"): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_engine( + engine_service.DeleteEngineRequest(), + name="name_value", + ) + + +def test_delete_engine_rest_error(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + engine_service.UpdateEngineRequest, + dict, + ], +) +def test_update_engine_rest(request_type): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "engine": { + "name": "projects/sample1/locations/sample2/collections/sample3/engines/sample4" + } + } + request_init["engine"] = { + "chat_engine_config": { + "agent_creation_config": { + "business": "business_value", + "default_language_code": "default_language_code_value", + "time_zone": "time_zone_value", + "location": "location_value", + }, + "dialogflow_agent_to_link": "dialogflow_agent_to_link_value", + }, + "search_engine_config": {"search_tier": 1, "search_add_ons": [1]}, + "chat_engine_metadata": {"dialogflow_agent": "dialogflow_agent_value"}, + "name": "projects/sample1/locations/sample2/collections/sample3/engines/sample4", + "display_name": "display_name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "data_store_ids": ["data_store_ids_value1", "data_store_ids_value2"], + "solution_type": 1, + "industry_vertical": 1, + "common_config": {"company_name": "company_name_value"}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = engine_service.UpdateEngineRequest.meta.fields["engine"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["engine"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # 
Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["engine"][field])): + del request_init["engine"][field][i][subfield] + else: + del request_init["engine"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcd_engine.Engine( + name="name_value", + display_name="display_name_value", + data_store_ids=["data_store_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + industry_vertical=common.IndustryVertical.GENERIC, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcd_engine.Engine.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_engine(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcd_engine.Engine) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.data_store_ids == ["data_store_ids_value"] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.industry_vertical == common.IndustryVertical.GENERIC + + +def test_update_engine_rest_required_fields( + request_type=engine_service.UpdateEngineRequest, +): + transport_class = transports.EngineServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_engine._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_engine._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcd_engine.Engine() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gcd_engine.Engine.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_engine(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_engine_rest_unset_required_fields(): + transport = transports.EngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_engine._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("engine",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_engine_rest_interceptors(null_interceptor): + transport = transports.EngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.EngineServiceRestInterceptor(), + ) + client = EngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as 
transcode, mock.patch.object( + transports.EngineServiceRestInterceptor, "post_update_engine" + ) as post, mock.patch.object( + transports.EngineServiceRestInterceptor, "pre_update_engine" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = engine_service.UpdateEngineRequest.pb( + engine_service.UpdateEngineRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gcd_engine.Engine.to_json(gcd_engine.Engine()) + + request = engine_service.UpdateEngineRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gcd_engine.Engine() + + client.update_engine( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_engine_rest_bad_request( + transport: str = "rest", request_type=engine_service.UpdateEngineRequest +): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "engine": { + "name": "projects/sample1/locations/sample2/collections/sample3/engines/sample4" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_engine(request) + + +def test_update_engine_rest_flattened(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcd_engine.Engine() + + # get arguments that satisfy an http rule for this method + sample_request = { + "engine": { + "name": "projects/sample1/locations/sample2/collections/sample3/engines/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + engine=gcd_engine.Engine( + chat_engine_config=gcd_engine.Engine.ChatEngineConfig( + agent_creation_config=gcd_engine.Engine.ChatEngineConfig.AgentCreationConfig( + business="business_value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcd_engine.Engine.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_engine(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{engine.name=projects/*/locations/*/collections/*/engines/*}" + % client.transport._host, + args[1], + ) + + +def test_update_engine_rest_flattened_error(transport: str = "rest"): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_engine( + engine_service.UpdateEngineRequest(), + engine=gcd_engine.Engine( + chat_engine_config=gcd_engine.Engine.ChatEngineConfig( + agent_creation_config=gcd_engine.Engine.ChatEngineConfig.AgentCreationConfig( + business="business_value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_engine_rest_error(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + engine_service.GetEngineRequest, + dict, + ], +) +def test_get_engine_rest(request_type): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/engines/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = engine.Engine( + name="name_value", + display_name="display_name_value", + data_store_ids=["data_store_ids_value"], + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + industry_vertical=common.IndustryVertical.GENERIC, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = engine.Engine.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_engine(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, engine.Engine) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.data_store_ids == ["data_store_ids_value"] + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.industry_vertical == common.IndustryVertical.GENERIC + + +def test_get_engine_rest_required_fields(request_type=engine_service.GetEngineRequest): + transport_class = transports.EngineServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_engine._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + 
).get_engine._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = engine.Engine() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = engine.Engine.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_engine(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_engine_rest_unset_required_fields(): + transport = transports.EngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_engine._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + 
+@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_engine_rest_interceptors(null_interceptor): + transport = transports.EngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.EngineServiceRestInterceptor(), + ) + client = EngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.EngineServiceRestInterceptor, "post_get_engine" + ) as post, mock.patch.object( + transports.EngineServiceRestInterceptor, "pre_get_engine" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = engine_service.GetEngineRequest.pb( + engine_service.GetEngineRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = engine.Engine.to_json(engine.Engine()) + + request = engine_service.GetEngineRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = engine.Engine() + + client.get_engine( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_engine_rest_bad_request( + transport: str = "rest", request_type=engine_service.GetEngineRequest +): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/engines/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a 
BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_engine(request) + + +def test_get_engine_rest_flattened(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = engine.Engine() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/collections/sample3/engines/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = engine.Engine.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_engine(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/collections/*/engines/*}" + % client.transport._host, + args[1], + ) + + +def test_get_engine_rest_flattened_error(transport: str = "rest"): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_engine( + engine_service.GetEngineRequest(), + name="name_value", + ) + + +def test_get_engine_rest_error(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + engine_service.ListEnginesRequest, + dict, + ], +) +def test_list_engines_rest(request_type): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/collections/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = engine_service.ListEnginesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = engine_service.ListEnginesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_engines(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListEnginesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_engines_rest_required_fields( + request_type=engine_service.ListEnginesRequest, +): + transport_class = transports.EngineServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_engines._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_engines._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = engine_service.ListEnginesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = engine_service.ListEnginesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_engines(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_engines_rest_unset_required_fields(): + transport = transports.EngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_engines._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_engines_rest_interceptors(null_interceptor): + transport = transports.EngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.EngineServiceRestInterceptor(), + ) + client = EngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" 
+ ) as transcode, mock.patch.object( + transports.EngineServiceRestInterceptor, "post_list_engines" + ) as post, mock.patch.object( + transports.EngineServiceRestInterceptor, "pre_list_engines" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = engine_service.ListEnginesRequest.pb( + engine_service.ListEnginesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = engine_service.ListEnginesResponse.to_json( + engine_service.ListEnginesResponse() + ) + + request = engine_service.ListEnginesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = engine_service.ListEnginesResponse() + + client.list_engines( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_engines_rest_bad_request( + transport: str = "rest", request_type=engine_service.ListEnginesRequest +): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/collections/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_engines(request) + + +def test_list_engines_rest_flattened(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = engine_service.ListEnginesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/collections/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = engine_service.ListEnginesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_engines(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*/collections/*}/engines" + % client.transport._host, + args[1], + ) + + +def test_list_engines_rest_flattened_error(transport: str = "rest"): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_engines( + engine_service.ListEnginesRequest(), + parent="parent_value", + ) + + +def test_list_engines_rest_pager(transport: str = "rest"): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + engine_service.ListEnginesResponse( + engines=[ + engine.Engine(), + engine.Engine(), + engine.Engine(), + ], + next_page_token="abc", + ), + engine_service.ListEnginesResponse( + engines=[], + next_page_token="def", + ), + engine_service.ListEnginesResponse( + engines=[ + engine.Engine(), + ], + next_page_token="ghi", + ), + engine_service.ListEnginesResponse( + engines=[ + engine.Engine(), + engine.Engine(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + engine_service.ListEnginesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + 
sample_request = { + "parent": "projects/sample1/locations/sample2/collections/sample3" + } + + pager = client.list_engines(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, engine.Engine) for i in results) + + pages = list(client.list_engines(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.EngineServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.EngineServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = EngineServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.EngineServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = EngineServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = EngineServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.EngineServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = EngineServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.EngineServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = EngineServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.EngineServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.EngineServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.EngineServiceGrpcTransport, + transports.EngineServiceGrpcAsyncIOTransport, + transports.EngineServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = EngineServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.EngineServiceGrpcTransport, + ) + + +def test_engine_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.EngineServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_engine_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.discoveryengine_v1beta.services.engine_service.transports.EngineServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.EngineServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "create_engine", + "delete_engine", + "update_engine", + "get_engine", + "list_engines", + "get_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_engine_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + 
"google.cloud.discoveryengine_v1beta.services.engine_service.transports.EngineServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.EngineServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_engine_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.discoveryengine_v1beta.services.engine_service.transports.EngineServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.EngineServiceTransport() + adc.assert_called_once() + + +def test_engine_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + EngineServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.EngineServiceGrpcTransport, + transports.EngineServiceGrpcAsyncIOTransport, + ], +) +def test_engine_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.EngineServiceGrpcTransport, + transports.EngineServiceGrpcAsyncIOTransport, + transports.EngineServiceRestTransport, + ], +) +def test_engine_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.EngineServiceGrpcTransport, grpc_helpers), + (transports.EngineServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_engine_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.EngineServiceGrpcTransport, + transports.EngineServiceGrpcAsyncIOTransport, + ], +) +def test_engine_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_engine_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.EngineServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_engine_service_rest_lro_client(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_engine_service_host_no_port(transport_name): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="discoveryengine.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_engine_service_host_with_port(transport_name): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="discoveryengine.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "discoveryengine.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_engine_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = EngineServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = EngineServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_engine._session + session2 = client2.transport.create_engine._session + assert session1 != session2 + session1 = client1.transport.delete_engine._session + session2 = client2.transport.delete_engine._session + assert session1 != session2 + session1 = client1.transport.update_engine._session + session2 = client2.transport.update_engine._session + assert 
session1 != session2 + session1 = client1.transport.get_engine._session + session2 = client2.transport.get_engine._session + assert session1 != session2 + session1 = client1.transport.list_engines._session + session2 = client2.transport.list_engines._session + assert session1 != session2 + + +def test_engine_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.EngineServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_engine_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.EngineServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.EngineServiceGrpcTransport, + transports.EngineServiceGrpcAsyncIOTransport, + ], +) +def test_engine_service_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.EngineServiceGrpcTransport, + transports.EngineServiceGrpcAsyncIOTransport, + ], +) +def test_engine_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_engine_service_grpc_lro_client(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_engine_service_grpc_lro_async_client(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. 
+ assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_collection_path(): + project = "squid" + location = "clam" + collection = "whelk" + expected = ( + "projects/{project}/locations/{location}/collections/{collection}".format( + project=project, + location=location, + collection=collection, + ) + ) + actual = EngineServiceClient.collection_path(project, location, collection) + assert expected == actual + + +def test_parse_collection_path(): + expected = { + "project": "octopus", + "location": "oyster", + "collection": "nudibranch", + } + path = EngineServiceClient.collection_path(**expected) + + # Check that the path construction is reversible. + actual = EngineServiceClient.parse_collection_path(path) + assert expected == actual + + +def test_engine_path(): + project = "cuttlefish" + location = "mussel" + collection = "winkle" + engine = "nautilus" + expected = "projects/{project}/locations/{location}/collections/{collection}/engines/{engine}".format( + project=project, + location=location, + collection=collection, + engine=engine, + ) + actual = EngineServiceClient.engine_path(project, location, collection, engine) + assert expected == actual + + +def test_parse_engine_path(): + expected = { + "project": "scallop", + "location": "abalone", + "collection": "squid", + "engine": "clam", + } + path = EngineServiceClient.engine_path(**expected) + + # Check that the path construction is reversible. 
+ actual = EngineServiceClient.parse_engine_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = EngineServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = EngineServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = EngineServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = EngineServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = EngineServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = EngineServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = EngineServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = EngineServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = EngineServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = EngineServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = EngineServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = EngineServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = EngineServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = EngineServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = EngineServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.EngineServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.EngineServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = EngineServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_get_operation(transport: str = "grpc"): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = EngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = EngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (EngineServiceClient, transports.EngineServiceGrpcTransport), + (EngineServiceAsyncClient, transports.EngineServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_recommendation_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_recommendation_service.py index 304cf4e9100f..921ef7e669e4 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_recommendation_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_recommendation_service.py @@ -2333,7 +2333,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + "name": 
"projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" }, request, ) @@ -2363,7 +2363,7 @@ def test_get_operation_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -2396,7 +2396,7 @@ def test_list_operations_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" }, request, ) @@ -2426,7 +2426,7 @@ def test_list_operations_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_schema_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_schema_service.py index ed19fefb24a3..22a09f600dd7 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_schema_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_schema_service.py @@ -4807,7 +4807,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" }, request, ) @@ -4837,7 +4837,7 @@ def test_get_operation_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -4870,7 +4870,7 @@ def test_list_operations_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" }, request, ) @@ -4900,7 +4900,7 @@ def test_list_operations_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_search_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_search_service.py index 308ea8fd304e..2dd84030cad8 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_search_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_search_service.py @@ -2535,7 +2535,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" }, request, ) @@ -2565,7 +2565,7 @@ def test_get_operation_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -2598,7 +2598,7 @@ def test_list_operations_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" }, request, ) @@ -2628,7 +2628,7 @@ def test_list_operations_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_serving_config_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_serving_config_service.py new file mode 100644 index 000000000000..8d79b0a1c194 --- /dev/null +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_serving_config_service.py @@ -0,0 +1,4613 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.discoveryengine_v1beta.services.serving_config_service import ( + ServingConfigServiceAsyncClient, + ServingConfigServiceClient, + pagers, + transports, +) +from google.cloud.discoveryengine_v1beta.types import ( + serving_config as gcd_serving_config, +) +from google.cloud.discoveryengine_v1beta.types import common, search_service +from google.cloud.discoveryengine_v1beta.types import serving_config +from google.cloud.discoveryengine_v1beta.types import serving_config_service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. 
+# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ServingConfigServiceClient._get_default_mtls_endpoint(None) is None + assert ( + ServingConfigServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + ServingConfigServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + ServingConfigServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ServingConfigServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ServingConfigServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert ServingConfigServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert ServingConfigServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with 
mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert ServingConfigServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + ServingConfigServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert ServingConfigServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert ServingConfigServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert ServingConfigServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + ServingConfigServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert ServingConfigServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ServingConfigServiceClient._get_client_cert_source(None, False) is None + assert ( + ServingConfigServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + ServingConfigServiceClient._get_client_cert_source( + 
mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + ServingConfigServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + ServingConfigServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + ServingConfigServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ServingConfigServiceClient), +) +@mock.patch.object( + ServingConfigServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ServingConfigServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = ServingConfigServiceClient._DEFAULT_UNIVERSE + default_endpoint = ServingConfigServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ServingConfigServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + ServingConfigServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + ServingConfigServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == ServingConfigServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ServingConfigServiceClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + ServingConfigServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == ServingConfigServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ServingConfigServiceClient._get_api_endpoint( + None, 
mock_client_cert_source, default_universe, "always" + ) + == ServingConfigServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ServingConfigServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + ServingConfigServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + ServingConfigServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + ServingConfigServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + ServingConfigServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + ServingConfigServiceClient._get_universe_domain(None, None) + == ServingConfigServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + ServingConfigServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + ServingConfigServiceClient, + transports.ServingConfigServiceGrpcTransport, + "grpc", + ), + ( + ServingConfigServiceClient, + transports.ServingConfigServiceRestTransport, + "rest", + ), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. 
+ assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. 
+ # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ServingConfigServiceClient, "grpc"), + (ServingConfigServiceAsyncClient, "grpc_asyncio"), + (ServingConfigServiceClient, "rest"), + ], +) +def test_serving_config_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.ServingConfigServiceGrpcTransport, "grpc"), + 
(transports.ServingConfigServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.ServingConfigServiceRestTransport, "rest"), + ], +) +def test_serving_config_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ServingConfigServiceClient, "grpc"), + (ServingConfigServiceAsyncClient, "grpc_asyncio"), + (ServingConfigServiceClient, "rest"), + ], +) +def test_serving_config_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +def test_serving_config_service_client_get_transport_class(): + transport = 
ServingConfigServiceClient.get_transport_class() + available_transports = [ + transports.ServingConfigServiceGrpcTransport, + transports.ServingConfigServiceRestTransport, + ] + assert transport in available_transports + + transport = ServingConfigServiceClient.get_transport_class("grpc") + assert transport == transports.ServingConfigServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + ServingConfigServiceClient, + transports.ServingConfigServiceGrpcTransport, + "grpc", + ), + ( + ServingConfigServiceAsyncClient, + transports.ServingConfigServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + ServingConfigServiceClient, + transports.ServingConfigServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + ServingConfigServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ServingConfigServiceClient), +) +@mock.patch.object( + ServingConfigServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ServingConfigServiceAsyncClient), +) +def test_serving_config_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ServingConfigServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ServingConfigServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + 
always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + ServingConfigServiceClient, + transports.ServingConfigServiceGrpcTransport, + "grpc", + "true", + ), + ( + ServingConfigServiceAsyncClient, + transports.ServingConfigServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + ServingConfigServiceClient, + transports.ServingConfigServiceGrpcTransport, + "grpc", + "false", + ), + ( + ServingConfigServiceAsyncClient, + transports.ServingConfigServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + ServingConfigServiceClient, + transports.ServingConfigServiceRestTransport, + "rest", + "true", + ), + ( + ServingConfigServiceClient, + transports.ServingConfigServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + ServingConfigServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ServingConfigServiceClient), +) +@mock.patch.object( + ServingConfigServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ServingConfigServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_serving_config_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [ServingConfigServiceClient, ServingConfigServiceAsyncClient] +) +@mock.patch.object( + ServingConfigServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ServingConfigServiceClient), +) +@mock.patch.object( + ServingConfigServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ServingConfigServiceAsyncClient), +) +def test_serving_config_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [ServingConfigServiceClient, ServingConfigServiceAsyncClient] +) +@mock.patch.object( + ServingConfigServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ServingConfigServiceClient), +) +@mock.patch.object( + ServingConfigServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ServingConfigServiceAsyncClient), +) +def test_serving_config_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = ServingConfigServiceClient._DEFAULT_UNIVERSE + default_endpoint = ServingConfigServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ServingConfigServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + ServingConfigServiceClient, + transports.ServingConfigServiceGrpcTransport, + "grpc", + ), + ( + ServingConfigServiceAsyncClient, + transports.ServingConfigServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + ServingConfigServiceClient, + transports.ServingConfigServiceRestTransport, + "rest", + ), + ], +) +def test_serving_config_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ServingConfigServiceClient, + transports.ServingConfigServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ServingConfigServiceAsyncClient, + transports.ServingConfigServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + ServingConfigServiceClient, + transports.ServingConfigServiceRestTransport, + "rest", + None, + ), + ], +) +def test_serving_config_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_serving_config_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.discoveryengine_v1beta.services.serving_config_service.transports.ServingConfigServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = ServingConfigServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ServingConfigServiceClient, + transports.ServingConfigServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ServingConfigServiceAsyncClient, + transports.ServingConfigServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_serving_config_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + serving_config_service.UpdateServingConfigRequest, + dict, + ], +) +def test_update_serving_config(request_type, transport: str = "grpc"): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime 
is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_serving_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_serving_config.ServingConfig( + name="name_value", + display_name="display_name_value", + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + model_id="model_id_value", + diversity_level="diversity_level_value", + ranking_expression="ranking_expression_value", + filter_control_ids=["filter_control_ids_value"], + boost_control_ids=["boost_control_ids_value"], + redirect_control_ids=["redirect_control_ids_value"], + synonyms_control_ids=["synonyms_control_ids_value"], + oneway_synonyms_control_ids=["oneway_synonyms_control_ids_value"], + dissociate_control_ids=["dissociate_control_ids_value"], + replacement_control_ids=["replacement_control_ids_value"], + ignore_control_ids=["ignore_control_ids_value"], + ) + response = client.update_serving_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == serving_config_service.UpdateServingConfigRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcd_serving_config.ServingConfig) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.model_id == "model_id_value" + assert response.diversity_level == "diversity_level_value" + assert response.ranking_expression == "ranking_expression_value" + assert response.filter_control_ids == ["filter_control_ids_value"] + assert response.boost_control_ids == ["boost_control_ids_value"] + assert response.redirect_control_ids == ["redirect_control_ids_value"] + assert response.synonyms_control_ids == ["synonyms_control_ids_value"] + assert response.oneway_synonyms_control_ids == ["oneway_synonyms_control_ids_value"] + assert response.dissociate_control_ids == ["dissociate_control_ids_value"] + assert response.replacement_control_ids == ["replacement_control_ids_value"] + assert response.ignore_control_ids == ["ignore_control_ids_value"] + + +def test_update_serving_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_serving_config), "__call__" + ) as call: + client.update_serving_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == serving_config_service.UpdateServingConfigRequest() + + +@pytest.mark.asyncio +async def test_update_serving_config_async( + transport: str = "grpc_asyncio", + request_type=serving_config_service.UpdateServingConfigRequest, +): + client = ServingConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_serving_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_serving_config.ServingConfig( + name="name_value", + display_name="display_name_value", + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + model_id="model_id_value", + diversity_level="diversity_level_value", + ranking_expression="ranking_expression_value", + filter_control_ids=["filter_control_ids_value"], + boost_control_ids=["boost_control_ids_value"], + redirect_control_ids=["redirect_control_ids_value"], + synonyms_control_ids=["synonyms_control_ids_value"], + oneway_synonyms_control_ids=["oneway_synonyms_control_ids_value"], + dissociate_control_ids=["dissociate_control_ids_value"], + replacement_control_ids=["replacement_control_ids_value"], + ignore_control_ids=["ignore_control_ids_value"], + ) + ) + response = await client.update_serving_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == serving_config_service.UpdateServingConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gcd_serving_config.ServingConfig) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.model_id == "model_id_value" + assert response.diversity_level == "diversity_level_value" + assert response.ranking_expression == "ranking_expression_value" + assert response.filter_control_ids == ["filter_control_ids_value"] + assert response.boost_control_ids == ["boost_control_ids_value"] + assert response.redirect_control_ids == ["redirect_control_ids_value"] + assert response.synonyms_control_ids == ["synonyms_control_ids_value"] + assert response.oneway_synonyms_control_ids == ["oneway_synonyms_control_ids_value"] + assert response.dissociate_control_ids == ["dissociate_control_ids_value"] + assert response.replacement_control_ids == ["replacement_control_ids_value"] + assert response.ignore_control_ids == ["ignore_control_ids_value"] + + +@pytest.mark.asyncio +async def test_update_serving_config_async_from_dict(): + await test_update_serving_config_async(request_type=dict) + + +def test_update_serving_config_field_headers(): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = serving_config_service.UpdateServingConfigRequest() + + request.serving_config.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_serving_config), "__call__" + ) as call: + call.return_value = gcd_serving_config.ServingConfig() + client.update_serving_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "serving_config.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_serving_config_field_headers_async(): + client = ServingConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = serving_config_service.UpdateServingConfigRequest() + + request.serving_config.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_serving_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_serving_config.ServingConfig() + ) + await client.update_serving_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "serving_config.name=name_value", + ) in kw["metadata"] + + +def test_update_serving_config_flattened(): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_serving_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = gcd_serving_config.ServingConfig() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_serving_config( + serving_config=gcd_serving_config.ServingConfig( + media_config=gcd_serving_config.ServingConfig.MediaConfig( + content_watched_percentage_threshold=0.3811 + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].serving_config + mock_val = gcd_serving_config.ServingConfig( + media_config=gcd_serving_config.ServingConfig.MediaConfig( + content_watched_percentage_threshold=0.3811 + ) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_serving_config_flattened_error(): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_serving_config( + serving_config_service.UpdateServingConfigRequest(), + serving_config=gcd_serving_config.ServingConfig( + media_config=gcd_serving_config.ServingConfig.MediaConfig( + content_watched_percentage_threshold=0.3811 + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_serving_config_flattened_async(): + client = ServingConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_serving_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = gcd_serving_config.ServingConfig() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_serving_config.ServingConfig() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_serving_config( + serving_config=gcd_serving_config.ServingConfig( + media_config=gcd_serving_config.ServingConfig.MediaConfig( + content_watched_percentage_threshold=0.3811 + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].serving_config + mock_val = gcd_serving_config.ServingConfig( + media_config=gcd_serving_config.ServingConfig.MediaConfig( + content_watched_percentage_threshold=0.3811 + ) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_serving_config_flattened_error_async(): + client = ServingConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_serving_config( + serving_config_service.UpdateServingConfigRequest(), + serving_config=gcd_serving_config.ServingConfig( + media_config=gcd_serving_config.ServingConfig.MediaConfig( + content_watched_percentage_threshold=0.3811 + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + serving_config_service.GetServingConfigRequest, + dict, + ], +) +def test_get_serving_config(request_type, transport: str = "grpc"): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_serving_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = serving_config.ServingConfig( + name="name_value", + display_name="display_name_value", + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + model_id="model_id_value", + diversity_level="diversity_level_value", + ranking_expression="ranking_expression_value", + filter_control_ids=["filter_control_ids_value"], + boost_control_ids=["boost_control_ids_value"], + redirect_control_ids=["redirect_control_ids_value"], + synonyms_control_ids=["synonyms_control_ids_value"], + oneway_synonyms_control_ids=["oneway_synonyms_control_ids_value"], + dissociate_control_ids=["dissociate_control_ids_value"], + replacement_control_ids=["replacement_control_ids_value"], + ignore_control_ids=["ignore_control_ids_value"], + ) + response = client.get_serving_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == serving_config_service.GetServingConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, serving_config.ServingConfig) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.model_id == "model_id_value" + assert response.diversity_level == "diversity_level_value" + assert response.ranking_expression == "ranking_expression_value" + assert response.filter_control_ids == ["filter_control_ids_value"] + assert response.boost_control_ids == ["boost_control_ids_value"] + assert response.redirect_control_ids == ["redirect_control_ids_value"] + assert response.synonyms_control_ids == ["synonyms_control_ids_value"] + assert response.oneway_synonyms_control_ids == ["oneway_synonyms_control_ids_value"] + assert response.dissociate_control_ids == ["dissociate_control_ids_value"] + assert response.replacement_control_ids == ["replacement_control_ids_value"] + assert response.ignore_control_ids == ["ignore_control_ids_value"] + + +def test_get_serving_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_serving_config), "__call__" + ) as call: + client.get_serving_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == serving_config_service.GetServingConfigRequest() + + +@pytest.mark.asyncio +async def test_get_serving_config_async( + transport: str = "grpc_asyncio", + request_type=serving_config_service.GetServingConfigRequest, +): + client = ServingConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_serving_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + serving_config.ServingConfig( + name="name_value", + display_name="display_name_value", + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + model_id="model_id_value", + diversity_level="diversity_level_value", + ranking_expression="ranking_expression_value", + filter_control_ids=["filter_control_ids_value"], + boost_control_ids=["boost_control_ids_value"], + redirect_control_ids=["redirect_control_ids_value"], + synonyms_control_ids=["synonyms_control_ids_value"], + oneway_synonyms_control_ids=["oneway_synonyms_control_ids_value"], + dissociate_control_ids=["dissociate_control_ids_value"], + replacement_control_ids=["replacement_control_ids_value"], + ignore_control_ids=["ignore_control_ids_value"], + ) + ) + response = await client.get_serving_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == serving_config_service.GetServingConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, serving_config.ServingConfig) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.model_id == "model_id_value" + assert response.diversity_level == "diversity_level_value" + assert response.ranking_expression == "ranking_expression_value" + assert response.filter_control_ids == ["filter_control_ids_value"] + assert response.boost_control_ids == ["boost_control_ids_value"] + assert response.redirect_control_ids == ["redirect_control_ids_value"] + assert response.synonyms_control_ids == ["synonyms_control_ids_value"] + assert response.oneway_synonyms_control_ids == ["oneway_synonyms_control_ids_value"] + assert response.dissociate_control_ids == ["dissociate_control_ids_value"] + assert response.replacement_control_ids == ["replacement_control_ids_value"] + assert response.ignore_control_ids == ["ignore_control_ids_value"] + + +@pytest.mark.asyncio +async def test_get_serving_config_async_from_dict(): + await test_get_serving_config_async(request_type=dict) + + +def test_get_serving_config_field_headers(): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = serving_config_service.GetServingConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_serving_config), "__call__" + ) as call: + call.return_value = serving_config.ServingConfig() + client.get_serving_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_serving_config_field_headers_async(): + client = ServingConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = serving_config_service.GetServingConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_serving_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + serving_config.ServingConfig() + ) + await client.get_serving_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_serving_config_flattened(): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_serving_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = serving_config.ServingConfig() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_serving_config( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_serving_config_flattened_error(): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_serving_config( + serving_config_service.GetServingConfigRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_serving_config_flattened_async(): + client = ServingConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_serving_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = serving_config.ServingConfig() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + serving_config.ServingConfig() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_serving_config( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_serving_config_flattened_error_async(): + client = ServingConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_serving_config( + serving_config_service.GetServingConfigRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + serving_config_service.ListServingConfigsRequest, + dict, + ], +) +def test_list_serving_configs(request_type, transport: str = "grpc"): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_serving_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = serving_config_service.ListServingConfigsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_serving_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == serving_config_service.ListServingConfigsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListServingConfigsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_serving_configs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_serving_configs), "__call__" + ) as call: + client.list_serving_configs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == serving_config_service.ListServingConfigsRequest() + + +@pytest.mark.asyncio +async def test_list_serving_configs_async( + transport: str = "grpc_asyncio", + request_type=serving_config_service.ListServingConfigsRequest, +): + client = ServingConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_serving_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + serving_config_service.ListServingConfigsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_serving_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == serving_config_service.ListServingConfigsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListServingConfigsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_serving_configs_async_from_dict(): + await test_list_serving_configs_async(request_type=dict) + + +def test_list_serving_configs_field_headers(): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = serving_config_service.ListServingConfigsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_serving_configs), "__call__" + ) as call: + call.return_value = serving_config_service.ListServingConfigsResponse() + client.list_serving_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_serving_configs_field_headers_async(): + client = ServingConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = serving_config_service.ListServingConfigsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_serving_configs), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + serving_config_service.ListServingConfigsResponse() + ) + await client.list_serving_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_serving_configs_flattened(): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_serving_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = serving_config_service.ListServingConfigsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_serving_configs( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_serving_configs_flattened_error(): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_serving_configs( + serving_config_service.ListServingConfigsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_serving_configs_flattened_async(): + client = ServingConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_serving_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = serving_config_service.ListServingConfigsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + serving_config_service.ListServingConfigsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_serving_configs( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_serving_configs_flattened_error_async(): + client = ServingConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_serving_configs( + serving_config_service.ListServingConfigsRequest(), + parent="parent_value", + ) + + +def test_list_serving_configs_pager(transport_name: str = "grpc"): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_serving_configs), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + serving_config_service.ListServingConfigsResponse( + serving_configs=[ + serving_config.ServingConfig(), + serving_config.ServingConfig(), + serving_config.ServingConfig(), + ], + next_page_token="abc", + ), + serving_config_service.ListServingConfigsResponse( + serving_configs=[], + next_page_token="def", + ), + serving_config_service.ListServingConfigsResponse( + serving_configs=[ + serving_config.ServingConfig(), + ], + next_page_token="ghi", + ), + serving_config_service.ListServingConfigsResponse( + serving_configs=[ + serving_config.ServingConfig(), + serving_config.ServingConfig(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_serving_configs(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, serving_config.ServingConfig) for i in results) + + +def test_list_serving_configs_pages(transport_name: str = "grpc"): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_serving_configs), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + serving_config_service.ListServingConfigsResponse( + serving_configs=[ + serving_config.ServingConfig(), + serving_config.ServingConfig(), + serving_config.ServingConfig(), + ], + next_page_token="abc", + ), + serving_config_service.ListServingConfigsResponse( + serving_configs=[], + next_page_token="def", + ), + serving_config_service.ListServingConfigsResponse( + serving_configs=[ + serving_config.ServingConfig(), + ], + next_page_token="ghi", + ), + serving_config_service.ListServingConfigsResponse( + serving_configs=[ + serving_config.ServingConfig(), + serving_config.ServingConfig(), + ], + ), + RuntimeError, + ) + pages = list(client.list_serving_configs(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_serving_configs_async_pager(): + client = ServingConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_serving_configs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + serving_config_service.ListServingConfigsResponse( + serving_configs=[ + serving_config.ServingConfig(), + serving_config.ServingConfig(), + serving_config.ServingConfig(), + ], + next_page_token="abc", + ), + serving_config_service.ListServingConfigsResponse( + serving_configs=[], + next_page_token="def", + ), + serving_config_service.ListServingConfigsResponse( + serving_configs=[ + serving_config.ServingConfig(), + ], + next_page_token="ghi", + ), + serving_config_service.ListServingConfigsResponse( + serving_configs=[ + serving_config.ServingConfig(), + serving_config.ServingConfig(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_serving_configs( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, serving_config.ServingConfig) for i in responses) + + +@pytest.mark.asyncio +async def test_list_serving_configs_async_pages(): + client = ServingConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_serving_configs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+        call.side_effect = (
+            serving_config_service.ListServingConfigsResponse(
+                serving_configs=[
+                    serving_config.ServingConfig(),
+                    serving_config.ServingConfig(),
+                    serving_config.ServingConfig(),
+                ],
+                next_page_token="abc",
+            ),
+            serving_config_service.ListServingConfigsResponse(
+                serving_configs=[],
+                next_page_token="def",
+            ),
+            serving_config_service.ListServingConfigsResponse(
+                serving_configs=[
+                    serving_config.ServingConfig(),
+                ],
+                next_page_token="ghi",
+            ),
+            serving_config_service.ListServingConfigsResponse(
+                serving_configs=[
+                    serving_config.ServingConfig(),
+                    serving_config.ServingConfig(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
+        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
+        async for page_ in (  # pragma: no branch
+            await client.list_serving_configs(request={})
+        ).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        serving_config_service.UpdateServingConfigRequest,
+        dict,
+    ],
+)
+def test_update_serving_config_rest(request_type):
+    client = ServingConfigServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "serving_config": {
+            "name": "projects/sample1/locations/sample2/dataStores/sample3/servingConfigs/sample4"
+        }
+    }
+    request_init["serving_config"] = {
+        "media_config": {
+            "content_watched_percentage_threshold": 0.3811,
+            "content_watched_seconds_threshold": 0.3508,
+            "demotion_event_type": "demotion_event_type_value",
+            "content_freshness_cutoff_days": 3105,
+        },
+        "generic_config": {
+            "content_search_spec": {
+                "snippet_spec": {
+                    "max_snippet_count": 1840,
+                    "reference_only": True,
+                    "return_snippet": True,
+                },
+                "summary_spec": {
+
"summary_result_count": 2196, + "include_citations": True, + "ignore_adversarial_query": True, + "ignore_non_summary_seeking_query": True, + "model_prompt_spec": {"preamble": "preamble_value"}, + "language_code": "language_code_value", + "model_spec": {"version": "version_value"}, + }, + "extractive_content_spec": { + "max_extractive_answer_count": 2907, + "max_extractive_segment_count": 3006, + "return_extractive_segment_score": True, + "num_previous_segments": 2289, + "num_next_segments": 1843, + }, + } + }, + "name": "projects/sample1/locations/sample2/dataStores/sample3/servingConfigs/sample4", + "display_name": "display_name_value", + "solution_type": 1, + "model_id": "model_id_value", + "diversity_level": "diversity_level_value", + "embedding_config": {"field_path": "field_path_value"}, + "ranking_expression": "ranking_expression_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "filter_control_ids": [ + "filter_control_ids_value1", + "filter_control_ids_value2", + ], + "boost_control_ids": ["boost_control_ids_value1", "boost_control_ids_value2"], + "redirect_control_ids": [ + "redirect_control_ids_value1", + "redirect_control_ids_value2", + ], + "synonyms_control_ids": [ + "synonyms_control_ids_value1", + "synonyms_control_ids_value2", + ], + "oneway_synonyms_control_ids": [ + "oneway_synonyms_control_ids_value1", + "oneway_synonyms_control_ids_value2", + ], + "dissociate_control_ids": [ + "dissociate_control_ids_value1", + "dissociate_control_ids_value2", + ], + "replacement_control_ids": [ + "replacement_control_ids_value1", + "replacement_control_ids_value2", + ], + "ignore_control_ids": [ + "ignore_control_ids_value1", + "ignore_control_ids_value2", + ], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+    # Delete any fields which are not present in the current runtime dependency
+    # See https://github.com/googleapis/gapic-generator-python/issues/1748
+
+    # Determine if the message type is proto-plus or protobuf
+    test_field = serving_config_service.UpdateServingConfigRequest.meta.fields[
+        "serving_config"
+    ]
+
+    def get_message_fields(field):
+        # Given a field which is a message (composite type), return a list with
+        # all the fields of the message.
+        # If the field is not a composite type, return an empty list.
+        message_fields = []
+
+        if hasattr(field, "message") and field.message:
+            is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR")
+
+            if is_field_type_proto_plus_type:
+                message_fields = field.message.meta.fields.values()
+            # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types
+            else:  # pragma: NO COVER
+                message_fields = field.message.DESCRIPTOR.fields
+        return message_fields
+
+    runtime_nested_fields = [
+        (field.name, nested_field.name)
+        for field in get_message_fields(test_field)
+        for nested_field in get_message_fields(field)
+    ]
+
+    subfields_not_in_runtime = []
+
+    # For each item in the sample request, create a list of sub fields which are not present at runtime
+    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
+    for field, value in request_init["serving_config"].items():  # pragma: NO COVER
+        result = None
+        is_repeated = False
+        # For repeated fields
+        if isinstance(value, list) and len(value):
+            is_repeated = True
+            result = value[0]
+        # For fields where the type is another message
+        if isinstance(value, dict):
+            result = value
+
+        if result and hasattr(result, "keys"):
+            for subfield in result.keys():
+                if (field, subfield) not in runtime_nested_fields:
+                    subfields_not_in_runtime.append(
+                        {
+                            "field": field,
+                            "subfield": subfield,
+                            "is_repeated": is_repeated,
+                        }
+                    )
+
+    # Remove fields from the sample request which are not present in the
runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["serving_config"][field])): + del request_init["serving_config"][field][i][subfield] + else: + del request_init["serving_config"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcd_serving_config.ServingConfig( + name="name_value", + display_name="display_name_value", + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + model_id="model_id_value", + diversity_level="diversity_level_value", + ranking_expression="ranking_expression_value", + filter_control_ids=["filter_control_ids_value"], + boost_control_ids=["boost_control_ids_value"], + redirect_control_ids=["redirect_control_ids_value"], + synonyms_control_ids=["synonyms_control_ids_value"], + oneway_synonyms_control_ids=["oneway_synonyms_control_ids_value"], + dissociate_control_ids=["dissociate_control_ids_value"], + replacement_control_ids=["replacement_control_ids_value"], + ignore_control_ids=["ignore_control_ids_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcd_serving_config.ServingConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = 
client.update_serving_config(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gcd_serving_config.ServingConfig) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.model_id == "model_id_value" + assert response.diversity_level == "diversity_level_value" + assert response.ranking_expression == "ranking_expression_value" + assert response.filter_control_ids == ["filter_control_ids_value"] + assert response.boost_control_ids == ["boost_control_ids_value"] + assert response.redirect_control_ids == ["redirect_control_ids_value"] + assert response.synonyms_control_ids == ["synonyms_control_ids_value"] + assert response.oneway_synonyms_control_ids == ["oneway_synonyms_control_ids_value"] + assert response.dissociate_control_ids == ["dissociate_control_ids_value"] + assert response.replacement_control_ids == ["replacement_control_ids_value"] + assert response.ignore_control_ids == ["ignore_control_ids_value"] + + +def test_update_serving_config_rest_required_fields( + request_type=serving_config_service.UpdateServingConfigRequest, +): + transport_class = transports.ServingConfigServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_serving_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + 
).update_serving_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcd_serving_config.ServingConfig() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gcd_serving_config.ServingConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_serving_config(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_serving_config_rest_unset_required_fields(): + transport = transports.ServingConfigServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_serving_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("servingConfig",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_serving_config_rest_interceptors(null_interceptor): + transport = transports.ServingConfigServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ServingConfigServiceRestInterceptor(), + ) + client = ServingConfigServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ServingConfigServiceRestInterceptor, "post_update_serving_config" + ) as post, mock.patch.object( + transports.ServingConfigServiceRestInterceptor, "pre_update_serving_config" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
serving_config_service.UpdateServingConfigRequest.pb( + serving_config_service.UpdateServingConfigRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gcd_serving_config.ServingConfig.to_json( + gcd_serving_config.ServingConfig() + ) + + request = serving_config_service.UpdateServingConfigRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gcd_serving_config.ServingConfig() + + client.update_serving_config( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_serving_config_rest_bad_request( + transport: str = "rest", + request_type=serving_config_service.UpdateServingConfigRequest, +): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "serving_config": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/servingConfigs/sample4" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_serving_config(request) + + +def test_update_serving_config_rest_flattened(): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcd_serving_config.ServingConfig() + + # get arguments that satisfy an http rule for this method + sample_request = { + "serving_config": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/servingConfigs/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + serving_config=gcd_serving_config.ServingConfig( + media_config=gcd_serving_config.ServingConfig.MediaConfig( + content_watched_percentage_threshold=0.3811 + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcd_serving_config.ServingConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_serving_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{serving_config.name=projects/*/locations/*/dataStores/*/servingConfigs/*}" + % client.transport._host, + args[1], + ) + + +def test_update_serving_config_rest_flattened_error(transport: str = "rest"): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_serving_config( + serving_config_service.UpdateServingConfigRequest(), + serving_config=gcd_serving_config.ServingConfig( + media_config=gcd_serving_config.ServingConfig.MediaConfig( + content_watched_percentage_threshold=0.3811 + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_serving_config_rest_error(): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + serving_config_service.GetServingConfigRequest, + dict, + ], +) +def test_get_serving_config_rest(request_type): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/servingConfigs/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = serving_config.ServingConfig( + name="name_value", + display_name="display_name_value", + solution_type=common.SolutionType.SOLUTION_TYPE_RECOMMENDATION, + model_id="model_id_value", + diversity_level="diversity_level_value", + ranking_expression="ranking_expression_value", + filter_control_ids=["filter_control_ids_value"], + boost_control_ids=["boost_control_ids_value"], + redirect_control_ids=["redirect_control_ids_value"], + synonyms_control_ids=["synonyms_control_ids_value"], + oneway_synonyms_control_ids=["oneway_synonyms_control_ids_value"], + dissociate_control_ids=["dissociate_control_ids_value"], + replacement_control_ids=["replacement_control_ids_value"], + ignore_control_ids=["ignore_control_ids_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = serving_config.ServingConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_serving_config(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, serving_config.ServingConfig) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.solution_type == common.SolutionType.SOLUTION_TYPE_RECOMMENDATION + assert response.model_id == "model_id_value" + assert response.diversity_level == "diversity_level_value" + assert response.ranking_expression == "ranking_expression_value" + assert response.filter_control_ids == ["filter_control_ids_value"] + assert response.boost_control_ids == ["boost_control_ids_value"] + assert response.redirect_control_ids == ["redirect_control_ids_value"] + assert response.synonyms_control_ids == ["synonyms_control_ids_value"] + assert response.oneway_synonyms_control_ids == ["oneway_synonyms_control_ids_value"] + assert response.dissociate_control_ids == ["dissociate_control_ids_value"] + assert response.replacement_control_ids == ["replacement_control_ids_value"] + assert response.ignore_control_ids == ["ignore_control_ids_value"] + + +def test_get_serving_config_rest_required_fields( + request_type=serving_config_service.GetServingConfigRequest, +): + transport_class = transports.ServingConfigServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_serving_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_serving_config._get_unset_required_fields(jsonified_request) 
+ jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = serving_config.ServingConfig() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = serving_config.ServingConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_serving_config(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_serving_config_rest_unset_required_fields(): + transport = transports.ServingConfigServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_serving_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + 
+@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_serving_config_rest_interceptors(null_interceptor): + transport = transports.ServingConfigServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ServingConfigServiceRestInterceptor(), + ) + client = ServingConfigServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ServingConfigServiceRestInterceptor, "post_get_serving_config" + ) as post, mock.patch.object( + transports.ServingConfigServiceRestInterceptor, "pre_get_serving_config" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = serving_config_service.GetServingConfigRequest.pb( + serving_config_service.GetServingConfigRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = serving_config.ServingConfig.to_json( + serving_config.ServingConfig() + ) + + request = serving_config_service.GetServingConfigRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = serving_config.ServingConfig() + + client.get_serving_config( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_serving_config_rest_bad_request( + transport: str = "rest", request_type=serving_config_service.GetServingConfigRequest +): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = 
{ + "name": "projects/sample1/locations/sample2/dataStores/sample3/servingConfigs/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_serving_config(request) + + +def test_get_serving_config_rest_flattened(): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = serving_config.ServingConfig() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/servingConfigs/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = serving_config.ServingConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_serving_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/dataStores/*/servingConfigs/*}" + % client.transport._host, + args[1], + ) + + +def test_get_serving_config_rest_flattened_error(transport: str = "rest"): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_serving_config( + serving_config_service.GetServingConfigRequest(), + name="name_value", + ) + + +def test_get_serving_config_rest_error(): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + serving_config_service.ListServingConfigsRequest, + dict, + ], +) +def test_list_serving_configs_rest(request_type): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = serving_config_service.ListServingConfigsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = serving_config_service.ListServingConfigsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_serving_configs(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListServingConfigsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_serving_configs_rest_required_fields( + request_type=serving_config_service.ListServingConfigsRequest, +): + transport_class = transports.ServingConfigServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_serving_configs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_serving_configs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = serving_config_service.ListServingConfigsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = serving_config_service.ListServingConfigsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_serving_configs(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_serving_configs_rest_unset_required_fields(): + transport = transports.ServingConfigServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_serving_configs._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_serving_configs_rest_interceptors(null_interceptor): + transport = transports.ServingConfigServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ServingConfigServiceRestInterceptor(), + ) + client = ServingConfigServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ServingConfigServiceRestInterceptor, "post_list_serving_configs" + ) as post, mock.patch.object( + transports.ServingConfigServiceRestInterceptor, "pre_list_serving_configs" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
serving_config_service.ListServingConfigsRequest.pb( + serving_config_service.ListServingConfigsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + serving_config_service.ListServingConfigsResponse.to_json( + serving_config_service.ListServingConfigsResponse() + ) + ) + + request = serving_config_service.ListServingConfigsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = serving_config_service.ListServingConfigsResponse() + + client.list_serving_configs( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_serving_configs_rest_bad_request( + transport: str = "rest", + request_type=serving_config_service.ListServingConfigsRequest, +): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_serving_configs(request) + + +def test_list_serving_configs_rest_flattened(): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = serving_config_service.ListServingConfigsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = serving_config_service.ListServingConfigsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_serving_configs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*/dataStores/*}/servingConfigs" + % client.transport._host, + args[1], + ) + + +def test_list_serving_configs_rest_flattened_error(transport: str = "rest"): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_serving_configs( + serving_config_service.ListServingConfigsRequest(), + parent="parent_value", + ) + + +def test_list_serving_configs_rest_pager(transport: str = "rest"): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + serving_config_service.ListServingConfigsResponse( + serving_configs=[ + serving_config.ServingConfig(), + serving_config.ServingConfig(), + serving_config.ServingConfig(), + ], + next_page_token="abc", + ), + serving_config_service.ListServingConfigsResponse( + serving_configs=[], + next_page_token="def", + ), + serving_config_service.ListServingConfigsResponse( + serving_configs=[ + serving_config.ServingConfig(), + ], + next_page_token="ghi", + ), + serving_config_service.ListServingConfigsResponse( + serving_configs=[ + serving_config.ServingConfig(), + serving_config.ServingConfig(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + serving_config_service.ListServingConfigsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3" + } + + pager = client.list_serving_configs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, serving_config.ServingConfig) for i in results) + + pages = list(client.list_serving_configs(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.ServingConfigServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ServingConfigServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ServingConfigServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ServingConfigServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ServingConfigServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ServingConfigServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ServingConfigServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ServingConfigServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.ServingConfigServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ServingConfigServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ServingConfigServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ServingConfigServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ServingConfigServiceGrpcTransport, + transports.ServingConfigServiceGrpcAsyncIOTransport, + transports.ServingConfigServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = ServingConfigServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ServingConfigServiceGrpcTransport, + ) + + +def test_serving_config_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ServingConfigServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_serving_config_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.discoveryengine_v1beta.services.serving_config_service.transports.ServingConfigServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ServingConfigServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "update_serving_config", + "get_serving_config", + "list_serving_configs", + "get_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_serving_config_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.discoveryengine_v1beta.services.serving_config_service.transports.ServingConfigServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ServingConfigServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_serving_config_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.discoveryengine_v1beta.services.serving_config_service.transports.ServingConfigServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ServingConfigServiceTransport() + adc.assert_called_once() + + +def test_serving_config_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ServingConfigServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ServingConfigServiceGrpcTransport, + transports.ServingConfigServiceGrpcAsyncIOTransport, + ], +) +def test_serving_config_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ServingConfigServiceGrpcTransport, + transports.ServingConfigServiceGrpcAsyncIOTransport, + transports.ServingConfigServiceRestTransport, + ], +) +def test_serving_config_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ServingConfigServiceGrpcTransport, grpc_helpers), + (transports.ServingConfigServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_serving_config_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ServingConfigServiceGrpcTransport, + transports.ServingConfigServiceGrpcAsyncIOTransport, + ], +) +def test_serving_config_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_serving_config_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ServingConfigServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_serving_config_service_host_no_port(transport_name): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="discoveryengine.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_serving_config_service_host_with_port(transport_name): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="discoveryengine.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "discoveryengine.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else 
"https://discoveryengine.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_serving_config_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ServingConfigServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ServingConfigServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.update_serving_config._session + session2 = client2.transport.update_serving_config._session + assert session1 != session2 + session1 = client1.transport.get_serving_config._session + session2 = client2.transport.get_serving_config._session + assert session1 != session2 + session1 = client1.transport.list_serving_configs._session + session2 = client2.transport.list_serving_configs._session + assert session1 != session2 + + +def test_serving_config_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ServingConfigServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_serving_config_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.ServingConfigServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.ServingConfigServiceGrpcTransport, + transports.ServingConfigServiceGrpcAsyncIOTransport, + ], +) +def test_serving_config_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments 
(api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.ServingConfigServiceGrpcTransport, + transports.ServingConfigServiceGrpcAsyncIOTransport, + ], +) +def test_serving_config_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_data_store_path(): + project = "squid" + location = "clam" + data_store = "whelk" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}".format( + project=project, + location=location, + data_store=data_store, + ) + actual = ServingConfigServiceClient.data_store_path(project, location, data_store) + assert expected == actual + + +def test_parse_data_store_path(): + expected = { + "project": "octopus", + "location": "oyster", + "data_store": "nudibranch", + } + path = ServingConfigServiceClient.data_store_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ServingConfigServiceClient.parse_data_store_path(path) + assert expected == actual + + +def test_serving_config_path(): + project = "cuttlefish" + location = "mussel" + data_store = "winkle" + serving_config = "nautilus" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}/servingConfigs/{serving_config}".format( + project=project, + location=location, + data_store=data_store, + serving_config=serving_config, + ) + actual = ServingConfigServiceClient.serving_config_path( + project, location, data_store, serving_config + ) + assert expected == actual + + +def test_parse_serving_config_path(): + expected = { + "project": "scallop", + "location": "abalone", + "data_store": "squid", + "serving_config": "clam", + } + path = ServingConfigServiceClient.serving_config_path(**expected) + + # Check that the path construction is reversible. + actual = ServingConfigServiceClient.parse_serving_config_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ServingConfigServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = ServingConfigServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ServingConfigServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = ServingConfigServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = ServingConfigServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ServingConfigServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = ServingConfigServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = ServingConfigServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ServingConfigServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = ServingConfigServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = ServingConfigServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ServingConfigServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = ServingConfigServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = ServingConfigServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = ServingConfigServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.ServingConfigServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ServingConfigServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ServingConfigServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = ServingConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = ServingConfigServiceClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_get_operation(transport: str = "grpc"): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = ServingConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = ServingConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = ServingConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = ServingConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = ServingConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = ServingConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = ServingConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (ServingConfigServiceClient, transports.ServingConfigServiceGrpcTransport), + ( + ServingConfigServiceAsyncClient, + transports.ServingConfigServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_site_search_engine_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_site_search_engine_service.py new file mode 100644 index 000000000000..7f5f86330768 --- /dev/null +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_site_search_engine_service.py @@ -0,0 +1,8683 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.discoveryengine_v1beta.services.site_search_engine_service import ( + SiteSearchEngineServiceAsyncClient, + SiteSearchEngineServiceClient, + pagers, + transports, +) +from google.cloud.discoveryengine_v1beta.types import ( + site_search_engine, + 
site_search_engine_service, +) + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert SiteSearchEngineServiceClient._get_default_mtls_endpoint(None) is None + assert ( + SiteSearchEngineServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + SiteSearchEngineServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + SiteSearchEngineServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + SiteSearchEngineServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + SiteSearchEngineServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert SiteSearchEngineServiceClient._read_environment_variables() == ( + False, + "auto", + None, + 
) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert SiteSearchEngineServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert SiteSearchEngineServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + SiteSearchEngineServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert SiteSearchEngineServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert SiteSearchEngineServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert SiteSearchEngineServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + SiteSearchEngineServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert SiteSearchEngineServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert 
SiteSearchEngineServiceClient._get_client_cert_source(None, False) is None + assert ( + SiteSearchEngineServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + SiteSearchEngineServiceClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + SiteSearchEngineServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + SiteSearchEngineServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + SiteSearchEngineServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SiteSearchEngineServiceClient), +) +@mock.patch.object( + SiteSearchEngineServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SiteSearchEngineServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = SiteSearchEngineServiceClient._DEFAULT_UNIVERSE + default_endpoint = SiteSearchEngineServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = SiteSearchEngineServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + SiteSearchEngineServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + SiteSearchEngineServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == SiteSearchEngineServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + SiteSearchEngineServiceClient._get_api_endpoint( + 
None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + SiteSearchEngineServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == SiteSearchEngineServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + SiteSearchEngineServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == SiteSearchEngineServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + SiteSearchEngineServiceClient._get_api_endpoint( + None, None, mock_universe, "never" + ) + == mock_endpoint + ) + assert ( + SiteSearchEngineServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + SiteSearchEngineServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + SiteSearchEngineServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + SiteSearchEngineServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + SiteSearchEngineServiceClient._get_universe_domain(None, None) + == SiteSearchEngineServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + SiteSearchEngineServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + SiteSearchEngineServiceClient, + transports.SiteSearchEngineServiceGrpcTransport, + "grpc", + ), + ( + SiteSearchEngineServiceClient, + transports.SiteSearchEngineServiceRestTransport, + "rest", + ), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. 
+ google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (SiteSearchEngineServiceClient, "grpc"), + (SiteSearchEngineServiceAsyncClient, "grpc_asyncio"), + (SiteSearchEngineServiceClient, "rest"), + ], +) +def test_site_search_engine_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.SiteSearchEngineServiceGrpcTransport, "grpc"), + (transports.SiteSearchEngineServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.SiteSearchEngineServiceRestTransport, "rest"), + ], +) +def test_site_search_engine_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, 
always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (SiteSearchEngineServiceClient, "grpc"), + (SiteSearchEngineServiceAsyncClient, "grpc_asyncio"), + (SiteSearchEngineServiceClient, "rest"), + ], +) +def test_site_search_engine_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +def test_site_search_engine_service_client_get_transport_class(): + transport = SiteSearchEngineServiceClient.get_transport_class() + available_transports = [ + transports.SiteSearchEngineServiceGrpcTransport, + transports.SiteSearchEngineServiceRestTransport, + ] + assert transport in available_transports + + transport = SiteSearchEngineServiceClient.get_transport_class("grpc") + assert transport == transports.SiteSearchEngineServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + SiteSearchEngineServiceClient, + transports.SiteSearchEngineServiceGrpcTransport, + "grpc", + ), + ( + SiteSearchEngineServiceAsyncClient, + transports.SiteSearchEngineServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + SiteSearchEngineServiceClient, + transports.SiteSearchEngineServiceRestTransport, + "rest", 
+ ), + ], +) +@mock.patch.object( + SiteSearchEngineServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SiteSearchEngineServiceClient), +) +@mock.patch.object( + SiteSearchEngineServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SiteSearchEngineServiceAsyncClient), +) +def test_site_search_engine_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(SiteSearchEngineServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(SiteSearchEngineServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + SiteSearchEngineServiceClient, + transports.SiteSearchEngineServiceGrpcTransport, + "grpc", + "true", + ), + ( + SiteSearchEngineServiceAsyncClient, + transports.SiteSearchEngineServiceGrpcAsyncIOTransport, + 
"grpc_asyncio", + "true", + ), + ( + SiteSearchEngineServiceClient, + transports.SiteSearchEngineServiceGrpcTransport, + "grpc", + "false", + ), + ( + SiteSearchEngineServiceAsyncClient, + transports.SiteSearchEngineServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + SiteSearchEngineServiceClient, + transports.SiteSearchEngineServiceRestTransport, + "rest", + "true", + ), + ( + SiteSearchEngineServiceClient, + transports.SiteSearchEngineServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + SiteSearchEngineServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SiteSearchEngineServiceClient), +) +@mock.patch.object( + SiteSearchEngineServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SiteSearchEngineServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_site_search_engine_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [SiteSearchEngineServiceClient, SiteSearchEngineServiceAsyncClient] +) +@mock.patch.object( + SiteSearchEngineServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SiteSearchEngineServiceClient), +) +@mock.patch.object( + SiteSearchEngineServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SiteSearchEngineServiceAsyncClient), +) +def test_site_search_engine_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [SiteSearchEngineServiceClient, SiteSearchEngineServiceAsyncClient] +) +@mock.patch.object( + SiteSearchEngineServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SiteSearchEngineServiceClient), +) +@mock.patch.object( + SiteSearchEngineServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SiteSearchEngineServiceAsyncClient), +) +def test_site_search_engine_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = SiteSearchEngineServiceClient._DEFAULT_UNIVERSE + default_endpoint = SiteSearchEngineServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = SiteSearchEngineServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + SiteSearchEngineServiceClient, + transports.SiteSearchEngineServiceGrpcTransport, + "grpc", + ), + ( + SiteSearchEngineServiceAsyncClient, + transports.SiteSearchEngineServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + SiteSearchEngineServiceClient, + transports.SiteSearchEngineServiceRestTransport, + "rest", + ), + ], +) +def test_site_search_engine_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + SiteSearchEngineServiceClient, + transports.SiteSearchEngineServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + SiteSearchEngineServiceAsyncClient, + transports.SiteSearchEngineServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + SiteSearchEngineServiceClient, + transports.SiteSearchEngineServiceRestTransport, + "rest", + None, + ), + ], +) +def test_site_search_engine_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_site_search_engine_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.discoveryengine_v1beta.services.site_search_engine_service.transports.SiteSearchEngineServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = SiteSearchEngineServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + SiteSearchEngineServiceClient, + transports.SiteSearchEngineServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + SiteSearchEngineServiceAsyncClient, + transports.SiteSearchEngineServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_site_search_engine_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.GetSiteSearchEngineRequest, + dict, + ], +) +def test_get_site_search_engine(request_type, transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the 
runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_site_search_engine), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = site_search_engine.SiteSearchEngine( + name="name_value", + ) + response = client.get_site_search_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.GetSiteSearchEngineRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, site_search_engine.SiteSearchEngine) + assert response.name == "name_value" + + +def test_get_site_search_engine_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_site_search_engine), "__call__" + ) as call: + client.get_site_search_engine() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.GetSiteSearchEngineRequest() + + +@pytest.mark.asyncio +async def test_get_site_search_engine_async( + transport: str = "grpc_asyncio", + request_type=site_search_engine_service.GetSiteSearchEngineRequest, +): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_site_search_engine), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + site_search_engine.SiteSearchEngine( + name="name_value", + ) + ) + response = await client.get_site_search_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.GetSiteSearchEngineRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, site_search_engine.SiteSearchEngine) + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_get_site_search_engine_async_from_dict(): + await test_get_site_search_engine_async(request_type=dict) + + +def test_get_site_search_engine_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.GetSiteSearchEngineRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_site_search_engine), "__call__" + ) as call: + call.return_value = site_search_engine.SiteSearchEngine() + client.get_site_search_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_site_search_engine_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.GetSiteSearchEngineRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_site_search_engine), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + site_search_engine.SiteSearchEngine() + ) + await client.get_site_search_engine(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_site_search_engine_flattened(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_site_search_engine), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = site_search_engine.SiteSearchEngine() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_site_search_engine( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_site_search_engine_flattened_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_site_search_engine( + site_search_engine_service.GetSiteSearchEngineRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_site_search_engine_flattened_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_site_search_engine), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = site_search_engine.SiteSearchEngine() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + site_search_engine.SiteSearchEngine() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_site_search_engine( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_site_search_engine_flattened_error_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_site_search_engine( + site_search_engine_service.GetSiteSearchEngineRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.CreateTargetSiteRequest, + dict, + ], +) +def test_create_target_site(request_type, transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_target_site), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.CreateTargetSiteRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_target_site_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_target_site), "__call__" + ) as call: + client.create_target_site() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.CreateTargetSiteRequest() + + +@pytest.mark.asyncio +async def test_create_target_site_async( + transport: str = "grpc_asyncio", + request_type=site_search_engine_service.CreateTargetSiteRequest, +): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_target_site), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.CreateTargetSiteRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_target_site_async_from_dict(): + await test_create_target_site_async(request_type=dict) + + +def test_create_target_site_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = site_search_engine_service.CreateTargetSiteRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_target_site), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_target_site_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.CreateTargetSiteRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_target_site), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_target_site_flattened(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_target_site), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_target_site( + parent="parent_value", + target_site=site_search_engine.TargetSite(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].target_site + mock_val = site_search_engine.TargetSite(name="name_value") + assert arg == mock_val + + +def test_create_target_site_flattened_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_target_site( + site_search_engine_service.CreateTargetSiteRequest(), + parent="parent_value", + target_site=site_search_engine.TargetSite(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_target_site_flattened_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_target_site), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_target_site( + parent="parent_value", + target_site=site_search_engine.TargetSite(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].target_site + mock_val = site_search_engine.TargetSite(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_target_site_flattened_error_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_target_site( + site_search_engine_service.CreateTargetSiteRequest(), + parent="parent_value", + target_site=site_search_engine.TargetSite(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.BatchCreateTargetSitesRequest, + dict, + ], +) +def test_batch_create_target_sites(request_type, transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_target_sites), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.batch_create_target_sites(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.BatchCreateTargetSitesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_batch_create_target_sites_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_target_sites), "__call__" + ) as call: + client.batch_create_target_sites() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.BatchCreateTargetSitesRequest() + + +@pytest.mark.asyncio +async def test_batch_create_target_sites_async( + transport: str = "grpc_asyncio", + request_type=site_search_engine_service.BatchCreateTargetSitesRequest, +): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_target_sites), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.batch_create_target_sites(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.BatchCreateTargetSitesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_batch_create_target_sites_async_from_dict(): + await test_batch_create_target_sites_async(request_type=dict) + + +def test_batch_create_target_sites_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.BatchCreateTargetSitesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_target_sites), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.batch_create_target_sites(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_batch_create_target_sites_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.BatchCreateTargetSitesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.batch_create_target_sites), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.batch_create_target_sites(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.GetTargetSiteRequest, + dict, + ], +) +def test_get_target_site(request_type, transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_target_site), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = site_search_engine.TargetSite( + name="name_value", + provided_uri_pattern="provided_uri_pattern_value", + type_=site_search_engine.TargetSite.Type.INCLUDE, + exact_match=True, + generated_uri_pattern="generated_uri_pattern_value", + indexing_status=site_search_engine.TargetSite.IndexingStatus.PENDING, + ) + response = client.get_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.GetTargetSiteRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, site_search_engine.TargetSite) + assert response.name == "name_value" + assert response.provided_uri_pattern == "provided_uri_pattern_value" + assert response.type_ == site_search_engine.TargetSite.Type.INCLUDE + assert response.exact_match is True + assert response.generated_uri_pattern == "generated_uri_pattern_value" + assert ( + response.indexing_status == site_search_engine.TargetSite.IndexingStatus.PENDING + ) + + +def test_get_target_site_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_target_site), "__call__") as call: + client.get_target_site() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.GetTargetSiteRequest() + + +@pytest.mark.asyncio +async def test_get_target_site_async( + transport: str = "grpc_asyncio", + request_type=site_search_engine_service.GetTargetSiteRequest, +): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_target_site), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + site_search_engine.TargetSite( + name="name_value", + provided_uri_pattern="provided_uri_pattern_value", + type_=site_search_engine.TargetSite.Type.INCLUDE, + exact_match=True, + generated_uri_pattern="generated_uri_pattern_value", + indexing_status=site_search_engine.TargetSite.IndexingStatus.PENDING, + ) + ) + response = await client.get_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.GetTargetSiteRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, site_search_engine.TargetSite) + assert response.name == "name_value" + assert response.provided_uri_pattern == "provided_uri_pattern_value" + assert response.type_ == site_search_engine.TargetSite.Type.INCLUDE + assert response.exact_match is True + assert response.generated_uri_pattern == "generated_uri_pattern_value" + assert ( + response.indexing_status == site_search_engine.TargetSite.IndexingStatus.PENDING + ) + + +@pytest.mark.asyncio +async def test_get_target_site_async_from_dict(): + await test_get_target_site_async(request_type=dict) + + +def test_get_target_site_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.GetTargetSiteRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_target_site), "__call__") as call: + call.return_value = site_search_engine.TargetSite() + client.get_target_site(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_target_site_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.GetTargetSiteRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_target_site), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + site_search_engine.TargetSite() + ) + await client.get_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_target_site_flattened(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_target_site), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = site_search_engine.TargetSite() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_target_site( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_target_site_flattened_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_target_site( + site_search_engine_service.GetTargetSiteRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_target_site_flattened_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_target_site), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = site_search_engine.TargetSite() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + site_search_engine.TargetSite() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_target_site( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_target_site_flattened_error_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_target_site( + site_search_engine_service.GetTargetSiteRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.UpdateTargetSiteRequest, + dict, + ], +) +def test_update_target_site(request_type, transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_target_site), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.UpdateTargetSiteRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_target_site_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_target_site), "__call__" + ) as call: + client.update_target_site() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.UpdateTargetSiteRequest() + + +@pytest.mark.asyncio +async def test_update_target_site_async( + transport: str = "grpc_asyncio", + request_type=site_search_engine_service.UpdateTargetSiteRequest, +): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_target_site), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.UpdateTargetSiteRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_target_site_async_from_dict(): + await test_update_target_site_async(request_type=dict) + + +def test_update_target_site_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = site_search_engine_service.UpdateTargetSiteRequest() + + request.target_site.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_target_site), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "target_site.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_target_site_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.UpdateTargetSiteRequest() + + request.target_site.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_target_site), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "target_site.name=name_value", + ) in kw["metadata"] + + +def test_update_target_site_flattened(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_target_site), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_target_site( + target_site=site_search_engine.TargetSite(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].target_site + mock_val = site_search_engine.TargetSite(name="name_value") + assert arg == mock_val + + +def test_update_target_site_flattened_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_target_site( + site_search_engine_service.UpdateTargetSiteRequest(), + target_site=site_search_engine.TargetSite(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_update_target_site_flattened_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_target_site), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_target_site( + target_site=site_search_engine.TargetSite(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].target_site + mock_val = site_search_engine.TargetSite(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_target_site_flattened_error_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_target_site( + site_search_engine_service.UpdateTargetSiteRequest(), + target_site=site_search_engine.TargetSite(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.DeleteTargetSiteRequest, + dict, + ], +) +def test_delete_target_site(request_type, transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_target_site), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.DeleteTargetSiteRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_target_site_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_target_site), "__call__" + ) as call: + client.delete_target_site() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.DeleteTargetSiteRequest() + + +@pytest.mark.asyncio +async def test_delete_target_site_async( + transport: str = "grpc_asyncio", + request_type=site_search_engine_service.DeleteTargetSiteRequest, +): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_target_site), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_target_site(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.DeleteTargetSiteRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_target_site_async_from_dict(): + await test_delete_target_site_async(request_type=dict) + + +def test_delete_target_site_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.DeleteTargetSiteRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_target_site), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_target_site_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.DeleteTargetSiteRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_target_site), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_target_site(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_target_site_flattened(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_target_site), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_target_site( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_target_site_flattened_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_target_site( + site_search_engine_service.DeleteTargetSiteRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_target_site_flattened_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_target_site), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_target_site( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_target_site_flattened_error_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_target_site( + site_search_engine_service.DeleteTargetSiteRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.ListTargetSitesRequest, + dict, + ], +) +def test_list_target_sites(request_type, transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_target_sites), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = site_search_engine_service.ListTargetSitesResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + response = client.list_target_sites(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.ListTargetSitesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTargetSitesPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +def test_list_target_sites_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_target_sites), "__call__" + ) as call: + client.list_target_sites() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.ListTargetSitesRequest() + + +@pytest.mark.asyncio +async def test_list_target_sites_async( + transport: str = "grpc_asyncio", + request_type=site_search_engine_service.ListTargetSitesRequest, +): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_target_sites), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + site_search_engine_service.ListTargetSitesResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + ) + response = await client.list_target_sites(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.ListTargetSitesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTargetSitesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +@pytest.mark.asyncio +async def test_list_target_sites_async_from_dict(): + await test_list_target_sites_async(request_type=dict) + + +def test_list_target_sites_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = site_search_engine_service.ListTargetSitesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_target_sites), "__call__" + ) as call: + call.return_value = site_search_engine_service.ListTargetSitesResponse() + client.list_target_sites(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_target_sites_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.ListTargetSitesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_target_sites), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + site_search_engine_service.ListTargetSitesResponse() + ) + await client.list_target_sites(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_target_sites_flattened(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_target_sites), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = site_search_engine_service.ListTargetSitesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_target_sites( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_target_sites_flattened_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_target_sites( + site_search_engine_service.ListTargetSitesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_target_sites_flattened_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_target_sites), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = site_search_engine_service.ListTargetSitesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + site_search_engine_service.ListTargetSitesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_target_sites( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_target_sites_flattened_error_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_target_sites( + site_search_engine_service.ListTargetSitesRequest(), + parent="parent_value", + ) + + +def test_list_target_sites_pager(transport_name: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_target_sites), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + next_page_token="abc", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[], + next_page_token="def", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + ], + next_page_token="ghi", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_target_sites(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, site_search_engine.TargetSite) for i in results) + + +def test_list_target_sites_pages(transport_name: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_target_sites), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + next_page_token="abc", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[], + next_page_token="def", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + ], + next_page_token="ghi", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + ), + RuntimeError, + ) + pages = list(client.list_target_sites(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_target_sites_async_pager(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_target_sites), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + next_page_token="abc", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[], + next_page_token="def", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + ], + next_page_token="ghi", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_target_sites( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, site_search_engine.TargetSite) for i in responses) + + +@pytest.mark.asyncio +async def test_list_target_sites_async_pages(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_target_sites), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + next_page_token="abc", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[], + next_page_token="def", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + ], + next_page_token="ghi", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_target_sites(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.EnableAdvancedSiteSearchRequest, + dict, + ], +) +def test_enable_advanced_site_search(request_type, transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enable_advanced_site_search), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.enable_advanced_site_search(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.EnableAdvancedSiteSearchRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_enable_advanced_site_search_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enable_advanced_site_search), "__call__" + ) as call: + client.enable_advanced_site_search() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.EnableAdvancedSiteSearchRequest() + + +@pytest.mark.asyncio +async def test_enable_advanced_site_search_async( + transport: str = "grpc_asyncio", + request_type=site_search_engine_service.EnableAdvancedSiteSearchRequest, +): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enable_advanced_site_search), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.enable_advanced_site_search(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.EnableAdvancedSiteSearchRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_enable_advanced_site_search_async_from_dict(): + await test_enable_advanced_site_search_async(request_type=dict) + + +def test_enable_advanced_site_search_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.EnableAdvancedSiteSearchRequest() + + request.site_search_engine = "site_search_engine_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enable_advanced_site_search), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.enable_advanced_site_search(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "site_search_engine=site_search_engine_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_enable_advanced_site_search_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = site_search_engine_service.EnableAdvancedSiteSearchRequest() + + request.site_search_engine = "site_search_engine_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enable_advanced_site_search), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.enable_advanced_site_search(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "site_search_engine=site_search_engine_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.DisableAdvancedSiteSearchRequest, + dict, + ], +) +def test_disable_advanced_site_search(request_type, transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.disable_advanced_site_search), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.disable_advanced_site_search(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.DisableAdvancedSiteSearchRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_disable_advanced_site_search_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.disable_advanced_site_search), "__call__" + ) as call: + client.disable_advanced_site_search() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.DisableAdvancedSiteSearchRequest() + + +@pytest.mark.asyncio +async def test_disable_advanced_site_search_async( + transport: str = "grpc_asyncio", + request_type=site_search_engine_service.DisableAdvancedSiteSearchRequest, +): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.disable_advanced_site_search), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.disable_advanced_site_search(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.DisableAdvancedSiteSearchRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_disable_advanced_site_search_async_from_dict(): + await test_disable_advanced_site_search_async(request_type=dict) + + +def test_disable_advanced_site_search_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.DisableAdvancedSiteSearchRequest() + + request.site_search_engine = "site_search_engine_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.disable_advanced_site_search), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.disable_advanced_site_search(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "site_search_engine=site_search_engine_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_disable_advanced_site_search_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.DisableAdvancedSiteSearchRequest() + + request.site_search_engine = "site_search_engine_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.disable_advanced_site_search), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.disable_advanced_site_search(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "site_search_engine=site_search_engine_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.RecrawlUrisRequest, + dict, + ], +) +def test_recrawl_uris(request_type, transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.recrawl_uris), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.recrawl_uris(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.RecrawlUrisRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_recrawl_uris_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.recrawl_uris), "__call__") as call: + client.recrawl_uris() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.RecrawlUrisRequest() + + +@pytest.mark.asyncio +async def test_recrawl_uris_async( + transport: str = "grpc_asyncio", + request_type=site_search_engine_service.RecrawlUrisRequest, +): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.recrawl_uris), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.recrawl_uris(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.RecrawlUrisRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_recrawl_uris_async_from_dict(): + await test_recrawl_uris_async(request_type=dict) + + +def test_recrawl_uris_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = site_search_engine_service.RecrawlUrisRequest() + + request.site_search_engine = "site_search_engine_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.recrawl_uris), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.recrawl_uris(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "site_search_engine=site_search_engine_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_recrawl_uris_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.RecrawlUrisRequest() + + request.site_search_engine = "site_search_engine_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.recrawl_uris), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.recrawl_uris(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "site_search_engine=site_search_engine_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.BatchVerifyTargetSitesRequest, + dict, + ], +) +def test_batch_verify_target_sites(request_type, transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_verify_target_sites), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.batch_verify_target_sites(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.BatchVerifyTargetSitesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_batch_verify_target_sites_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.batch_verify_target_sites), "__call__" + ) as call: + client.batch_verify_target_sites() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.BatchVerifyTargetSitesRequest() + + +@pytest.mark.asyncio +async def test_batch_verify_target_sites_async( + transport: str = "grpc_asyncio", + request_type=site_search_engine_service.BatchVerifyTargetSitesRequest, +): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_verify_target_sites), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.batch_verify_target_sites(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == site_search_engine_service.BatchVerifyTargetSitesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_batch_verify_target_sites_async_from_dict(): + await test_batch_verify_target_sites_async(request_type=dict) + + +def test_batch_verify_target_sites_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = site_search_engine_service.BatchVerifyTargetSitesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_verify_target_sites), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.batch_verify_target_sites(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_batch_verify_target_sites_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.BatchVerifyTargetSitesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_verify_target_sites), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.batch_verify_target_sites(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.FetchDomainVerificationStatusRequest, + dict, + ], +) +def test_fetch_domain_verification_status(request_type, transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_domain_verification_status), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + site_search_engine_service.FetchDomainVerificationStatusResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + ) + response = client.fetch_domain_verification_status(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert ( + args[0] == site_search_engine_service.FetchDomainVerificationStatusRequest() + ) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.FetchDomainVerificationStatusPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +def test_fetch_domain_verification_status_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.fetch_domain_verification_status), "__call__" + ) as call: + client.fetch_domain_verification_status() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert ( + args[0] == site_search_engine_service.FetchDomainVerificationStatusRequest() + ) + + +@pytest.mark.asyncio +async def test_fetch_domain_verification_status_async( + transport: str = "grpc_asyncio", + request_type=site_search_engine_service.FetchDomainVerificationStatusRequest, +): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_domain_verification_status), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + site_search_engine_service.FetchDomainVerificationStatusResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + ) + response = await client.fetch_domain_verification_status(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert ( + args[0] == site_search_engine_service.FetchDomainVerificationStatusRequest() + ) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.FetchDomainVerificationStatusAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +@pytest.mark.asyncio +async def test_fetch_domain_verification_status_async_from_dict(): + await test_fetch_domain_verification_status_async(request_type=dict) + + +def test_fetch_domain_verification_status_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.FetchDomainVerificationStatusRequest() + + request.site_search_engine = "site_search_engine_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_domain_verification_status), "__call__" + ) as call: + call.return_value = ( + site_search_engine_service.FetchDomainVerificationStatusResponse() + ) + client.fetch_domain_verification_status(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "site_search_engine=site_search_engine_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_fetch_domain_verification_status_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = site_search_engine_service.FetchDomainVerificationStatusRequest() + + request.site_search_engine = "site_search_engine_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.fetch_domain_verification_status), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + site_search_engine_service.FetchDomainVerificationStatusResponse() + ) + await client.fetch_domain_verification_status(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "site_search_engine=site_search_engine_value", + ) in kw["metadata"] + + +def test_fetch_domain_verification_status_pager(transport_name: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_domain_verification_status), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + next_page_token="abc", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[], + next_page_token="def", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + ], + next_page_token="ghi", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("site_search_engine", ""),)), + ) + pager = client.fetch_domain_verification_status(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, site_search_engine.TargetSite) for i in results) + + +def test_fetch_domain_verification_status_pages(transport_name: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_domain_verification_status), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + next_page_token="abc", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[], + next_page_token="def", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + ], + next_page_token="ghi", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + ), + RuntimeError, + ) + pages = list(client.fetch_domain_verification_status(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_fetch_domain_verification_status_async_pager(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_domain_verification_status), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + next_page_token="abc", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[], + next_page_token="def", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + ], + next_page_token="ghi", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + ), + RuntimeError, + ) + async_pager = await client.fetch_domain_verification_status( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, site_search_engine.TargetSite) for i in responses) + + +@pytest.mark.asyncio +async def test_fetch_domain_verification_status_async_pages(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_domain_verification_status), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + next_page_token="abc", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[], + next_page_token="def", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + ], + next_page_token="ghi", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.fetch_domain_verification_status(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.GetSiteSearchEngineRequest, + dict, + ], +) +def test_get_site_search_engine_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = site_search_engine.SiteSearchEngine( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = site_search_engine.SiteSearchEngine.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_site_search_engine(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, site_search_engine.SiteSearchEngine) + assert response.name == "name_value" + + +def test_get_site_search_engine_rest_required_fields( + request_type=site_search_engine_service.GetSiteSearchEngineRequest, +): + transport_class = transports.SiteSearchEngineServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_site_search_engine._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_site_search_engine._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = site_search_engine.SiteSearchEngine() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = site_search_engine.SiteSearchEngine.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_site_search_engine(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_site_search_engine_rest_unset_required_fields(): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_site_search_engine._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_site_search_engine_rest_interceptors(null_interceptor): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if 
null_interceptor + else transports.SiteSearchEngineServiceRestInterceptor(), + ) + client = SiteSearchEngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "post_get_site_search_engine" + ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "pre_get_site_search_engine" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = site_search_engine_service.GetSiteSearchEngineRequest.pb( + site_search_engine_service.GetSiteSearchEngineRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = site_search_engine.SiteSearchEngine.to_json( + site_search_engine.SiteSearchEngine() + ) + + request = site_search_engine_service.GetSiteSearchEngineRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = site_search_engine.SiteSearchEngine() + + client.get_site_search_engine( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_site_search_engine_rest_bad_request( + transport: str = "rest", + request_type=site_search_engine_service.GetSiteSearchEngineRequest, +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method 
and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_site_search_engine(request) + + +def test_get_site_search_engine_rest_flattened(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = site_search_engine.SiteSearchEngine() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = site_search_engine.SiteSearchEngine.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_site_search_engine(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/dataStores/*/siteSearchEngine}" + % client.transport._host, + args[1], + ) + + +def test_get_site_search_engine_rest_flattened_error(transport: str = "rest"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_site_search_engine( + site_search_engine_service.GetSiteSearchEngineRequest(), + name="name_value", + ) + + +def test_get_site_search_engine_rest_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.CreateTargetSiteRequest, + dict, + ], +) +def test_create_target_site_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request_init["target_site"] = { + "name": "name_value", + "provided_uri_pattern": "provided_uri_pattern_value", + "type_": 1, + "exact_match": True, + "generated_uri_pattern": "generated_uri_pattern_value", + "site_verification_info": { + "site_verification_state": 1, + "verify_time": {"seconds": 751, "nanos": 543}, + }, + "indexing_status": 1, + "update_time": {}, + "failure_reason": {"quota_failure": {"total_required_quota": 2157}}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = site_search_engine_service.CreateTargetSiteRequest.meta.fields[ + "target_site" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["target_site"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime 
version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["target_site"][field])): + del request_init["target_site"][field][i][subfield] + else: + del request_init["target_site"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_target_site(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_target_site_rest_required_fields( + request_type=site_search_engine_service.CreateTargetSiteRequest, +): + transport_class = transports.SiteSearchEngineServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_target_site._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_target_site._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_target_site(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_target_site_rest_unset_required_fields(): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_target_site._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "targetSite", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_target_site_rest_interceptors(null_interceptor): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SiteSearchEngineServiceRestInterceptor(), + ) + client = SiteSearchEngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "post_create_target_site" + ) as post, mock.patch.object( + 
transports.SiteSearchEngineServiceRestInterceptor, "pre_create_target_site" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = site_search_engine_service.CreateTargetSiteRequest.pb( + site_search_engine_service.CreateTargetSiteRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = site_search_engine_service.CreateTargetSiteRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_target_site( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_target_site_rest_bad_request( + transport: str = "rest", + request_type=site_search_engine_service.CreateTargetSiteRequest, +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_target_site(request) + + +def test_create_target_site_rest_flattened(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + target_site=site_search_engine.TargetSite(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_target_site(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*/dataStores/*/siteSearchEngine}/targetSites" + % client.transport._host, + args[1], + ) + + +def test_create_target_site_rest_flattened_error(transport: str = "rest"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_target_site( + site_search_engine_service.CreateTargetSiteRequest(), + parent="parent_value", + target_site=site_search_engine.TargetSite(name="name_value"), + ) + + +def test_create_target_site_rest_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.BatchCreateTargetSitesRequest, + dict, + ], +) +def test_batch_create_target_sites_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.batch_create_target_sites(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_batch_create_target_sites_rest_required_fields( + request_type=site_search_engine_service.BatchCreateTargetSitesRequest, +): + transport_class = transports.SiteSearchEngineServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_create_target_sites._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_create_target_sites._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.batch_create_target_sites(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_batch_create_target_sites_rest_unset_required_fields(): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.batch_create_target_sites._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "requests", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_create_target_sites_rest_interceptors(null_interceptor): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SiteSearchEngineServiceRestInterceptor(), + ) + client = 
SiteSearchEngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_batch_create_target_sites", + ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "pre_batch_create_target_sites", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = site_search_engine_service.BatchCreateTargetSitesRequest.pb( + site_search_engine_service.BatchCreateTargetSitesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = site_search_engine_service.BatchCreateTargetSitesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.batch_create_target_sites( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_create_target_sites_rest_bad_request( + transport: str = "rest", + request_type=site_search_engine_service.BatchCreateTargetSitesRequest, +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest 
error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_create_target_sites(request) + + +def test_batch_create_target_sites_rest_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.GetTargetSiteRequest, + dict, + ], +) +def test_get_target_site_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine/targetSites/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = site_search_engine.TargetSite( + name="name_value", + provided_uri_pattern="provided_uri_pattern_value", + type_=site_search_engine.TargetSite.Type.INCLUDE, + exact_match=True, + generated_uri_pattern="generated_uri_pattern_value", + indexing_status=site_search_engine.TargetSite.IndexingStatus.PENDING, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = site_search_engine.TargetSite.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_target_site(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, site_search_engine.TargetSite) + assert response.name == "name_value" + assert response.provided_uri_pattern == "provided_uri_pattern_value" + assert response.type_ == site_search_engine.TargetSite.Type.INCLUDE + assert response.exact_match is True + assert response.generated_uri_pattern == "generated_uri_pattern_value" + assert ( + response.indexing_status == site_search_engine.TargetSite.IndexingStatus.PENDING + ) + + +def test_get_target_site_rest_required_fields( + request_type=site_search_engine_service.GetTargetSiteRequest, +): + transport_class = transports.SiteSearchEngineServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_target_site._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify 
required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_target_site._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = site_search_engine.TargetSite() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = site_search_engine.TargetSite.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_target_site(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_target_site_rest_unset_required_fields(): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_target_site._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_target_site_rest_interceptors(null_interceptor): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SiteSearchEngineServiceRestInterceptor(), + ) + client = SiteSearchEngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "post_get_target_site" + ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "pre_get_target_site" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = site_search_engine_service.GetTargetSiteRequest.pb( + site_search_engine_service.GetTargetSiteRequest() + ) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = site_search_engine.TargetSite.to_json( + site_search_engine.TargetSite() + ) + + request = site_search_engine_service.GetTargetSiteRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = site_search_engine.TargetSite() + + client.get_target_site( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_target_site_rest_bad_request( + transport: str = "rest", + request_type=site_search_engine_service.GetTargetSiteRequest, +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine/targetSites/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_target_site(request) + + +def test_get_target_site_rest_flattened(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = site_search_engine.TargetSite() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine/targetSites/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = site_search_engine.TargetSite.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_target_site(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/dataStores/*/siteSearchEngine/targetSites/*}" + % client.transport._host, + args[1], + ) + + +def test_get_target_site_rest_flattened_error(transport: str = "rest"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_target_site( + site_search_engine_service.GetTargetSiteRequest(), + name="name_value", + ) + + +def test_get_target_site_rest_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.UpdateTargetSiteRequest, + dict, + ], +) +def test_update_target_site_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "target_site": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine/targetSites/sample4" + } + } + request_init["target_site"] = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine/targetSites/sample4", + "provided_uri_pattern": "provided_uri_pattern_value", + "type_": 1, + "exact_match": True, + "generated_uri_pattern": "generated_uri_pattern_value", + "site_verification_info": { + "site_verification_state": 1, + "verify_time": {"seconds": 751, "nanos": 543}, + }, + "indexing_status": 1, + "update_time": {}, + "failure_reason": {"quota_failure": {"total_required_quota": 2157}}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = site_search_engine_service.UpdateTargetSiteRequest.meta.fields[ + "target_site" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["target_site"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["target_site"][field])): + del request_init["target_site"][field][i][subfield] + else: + del 
request_init["target_site"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_target_site(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_target_site_rest_required_fields( + request_type=site_search_engine_service.UpdateTargetSiteRequest, +): + transport_class = transports.SiteSearchEngineServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_target_site._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_target_site._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) 
+ + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_target_site(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_target_site_rest_unset_required_fields(): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_target_site._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("targetSite",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_target_site_rest_interceptors(null_interceptor): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SiteSearchEngineServiceRestInterceptor(), + ) + client = 
SiteSearchEngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "post_update_target_site" + ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "pre_update_target_site" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = site_search_engine_service.UpdateTargetSiteRequest.pb( + site_search_engine_service.UpdateTargetSiteRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = site_search_engine_service.UpdateTargetSiteRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_target_site( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_target_site_rest_bad_request( + transport: str = "rest", + request_type=site_search_engine_service.UpdateTargetSiteRequest, +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "target_site": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine/targetSites/sample4" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_target_site(request) + + +def test_update_target_site_rest_flattened(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "target_site": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine/targetSites/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + target_site=site_search_engine.TargetSite(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_target_site(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{target_site.name=projects/*/locations/*/dataStores/*/siteSearchEngine/targetSites/*}" + % client.transport._host, + args[1], + ) + + +def test_update_target_site_rest_flattened_error(transport: str = "rest"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_target_site( + site_search_engine_service.UpdateTargetSiteRequest(), + target_site=site_search_engine.TargetSite(name="name_value"), + ) + + +def test_update_target_site_rest_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.DeleteTargetSiteRequest, + dict, + ], +) +def test_delete_target_site_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine/targetSites/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_target_site(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_target_site_rest_required_fields( + request_type=site_search_engine_service.DeleteTargetSiteRequest, +): + transport_class = transports.SiteSearchEngineServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_target_site._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_target_site._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_target_site(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_target_site_rest_unset_required_fields(): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_target_site._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_target_site_rest_interceptors(null_interceptor): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SiteSearchEngineServiceRestInterceptor(), + ) + client = SiteSearchEngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, 
mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "post_delete_target_site" + ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "pre_delete_target_site" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = site_search_engine_service.DeleteTargetSiteRequest.pb( + site_search_engine_service.DeleteTargetSiteRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = site_search_engine_service.DeleteTargetSiteRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_target_site( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_target_site_rest_bad_request( + transport: str = "rest", + request_type=site_search_engine_service.DeleteTargetSiteRequest, +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine/targetSites/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_target_site(request) + + +def test_delete_target_site_rest_flattened(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine/targetSites/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_target_site(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/dataStores/*/siteSearchEngine/targetSites/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_target_site_rest_flattened_error(transport: str = "rest"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_target_site( + site_search_engine_service.DeleteTargetSiteRequest(), + name="name_value", + ) + + +def test_delete_target_site_rest_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.ListTargetSitesRequest, + dict, + ], +) +def test_list_target_sites_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = site_search_engine_service.ListTargetSitesResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = site_search_engine_service.ListTargetSitesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_target_sites(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTargetSitesPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +def test_list_target_sites_rest_required_fields( + request_type=site_search_engine_service.ListTargetSitesRequest, +): + transport_class = transports.SiteSearchEngineServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_target_sites._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_target_sites._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = site_search_engine_service.ListTargetSitesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = site_search_engine_service.ListTargetSitesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_target_sites(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_target_sites_rest_unset_required_fields(): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_target_sites._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_target_sites_rest_interceptors(null_interceptor): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SiteSearchEngineServiceRestInterceptor(), + ) + client = SiteSearchEngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "post_list_target_sites" + ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "pre_list_target_sites" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
site_search_engine_service.ListTargetSitesRequest.pb( + site_search_engine_service.ListTargetSitesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + site_search_engine_service.ListTargetSitesResponse.to_json( + site_search_engine_service.ListTargetSitesResponse() + ) + ) + + request = site_search_engine_service.ListTargetSitesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = site_search_engine_service.ListTargetSitesResponse() + + client.list_target_sites( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_target_sites_rest_bad_request( + transport: str = "rest", + request_type=site_search_engine_service.ListTargetSitesRequest, +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_target_sites(request) + + +def test_list_target_sites_rest_flattened(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = site_search_engine_service.ListTargetSitesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = site_search_engine_service.ListTargetSitesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_target_sites(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*/dataStores/*/siteSearchEngine}/targetSites" + % client.transport._host, + args[1], + ) + + +def test_list_target_sites_rest_flattened_error(transport: str = "rest"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_target_sites( + site_search_engine_service.ListTargetSitesRequest(), + parent="parent_value", + ) + + +def test_list_target_sites_rest_pager(transport: str = "rest"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + next_page_token="abc", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[], + next_page_token="def", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + ], + next_page_token="ghi", + ), + site_search_engine_service.ListTargetSitesResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + site_search_engine_service.ListTargetSitesResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + + pager = client.list_target_sites(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, site_search_engine.TargetSite) for i in results) + + pages = list(client.list_target_sites(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.EnableAdvancedSiteSearchRequest, + dict, + ], +) +def test_enable_advanced_site_search_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a 
request that will satisfy transcoding + request_init = { + "site_search_engine": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.enable_advanced_site_search(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_enable_advanced_site_search_rest_required_fields( + request_type=site_search_engine_service.EnableAdvancedSiteSearchRequest, +): + transport_class = transports.SiteSearchEngineServiceRestTransport + + request_init = {} + request_init["site_search_engine"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).enable_advanced_site_search._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["siteSearchEngine"] = "site_search_engine_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).enable_advanced_site_search._get_unset_required_fields(jsonified_request) + 
jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "siteSearchEngine" in jsonified_request + assert jsonified_request["siteSearchEngine"] == "site_search_engine_value" + + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.enable_advanced_site_search(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_enable_advanced_site_search_rest_unset_required_fields(): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.enable_advanced_site_search._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("siteSearchEngine",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_enable_advanced_site_search_rest_interceptors(null_interceptor): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SiteSearchEngineServiceRestInterceptor(), + ) + client = SiteSearchEngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_enable_advanced_site_search", + ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "pre_enable_advanced_site_search", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
site_search_engine_service.EnableAdvancedSiteSearchRequest.pb( + site_search_engine_service.EnableAdvancedSiteSearchRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = site_search_engine_service.EnableAdvancedSiteSearchRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.enable_advanced_site_search( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_enable_advanced_site_search_rest_bad_request( + transport: str = "rest", + request_type=site_search_engine_service.EnableAdvancedSiteSearchRequest, +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "site_search_engine": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.enable_advanced_site_search(request) + + +def test_enable_advanced_site_search_rest_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.DisableAdvancedSiteSearchRequest, + dict, + ], +) +def test_disable_advanced_site_search_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "site_search_engine": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.disable_advanced_site_search(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_disable_advanced_site_search_rest_required_fields( + request_type=site_search_engine_service.DisableAdvancedSiteSearchRequest, +): + transport_class = transports.SiteSearchEngineServiceRestTransport + + request_init = {} + request_init["site_search_engine"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).disable_advanced_site_search._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["siteSearchEngine"] = "site_search_engine_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).disable_advanced_site_search._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "siteSearchEngine" in jsonified_request + assert jsonified_request["siteSearchEngine"] == "site_search_engine_value" + + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.disable_advanced_site_search(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_disable_advanced_site_search_rest_unset_required_fields(): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.disable_advanced_site_search._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("siteSearchEngine",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_disable_advanced_site_search_rest_interceptors(null_interceptor): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SiteSearchEngineServiceRestInterceptor(), + ) + client = SiteSearchEngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_disable_advanced_site_search", + ) as post, mock.patch.object( + 
transports.SiteSearchEngineServiceRestInterceptor, + "pre_disable_advanced_site_search", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = site_search_engine_service.DisableAdvancedSiteSearchRequest.pb( + site_search_engine_service.DisableAdvancedSiteSearchRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = site_search_engine_service.DisableAdvancedSiteSearchRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.disable_advanced_site_search( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_disable_advanced_site_search_rest_bad_request( + transport: str = "rest", + request_type=site_search_engine_service.DisableAdvancedSiteSearchRequest, +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "site_search_engine": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.disable_advanced_site_search(request) + + +def test_disable_advanced_site_search_rest_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.RecrawlUrisRequest, + dict, + ], +) +def test_recrawl_uris_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "site_search_engine": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.recrawl_uris(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_recrawl_uris_rest_required_fields( + request_type=site_search_engine_service.RecrawlUrisRequest, +): + transport_class = transports.SiteSearchEngineServiceRestTransport + + request_init = {} + request_init["site_search_engine"] = "" + request_init["uris"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).recrawl_uris._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["siteSearchEngine"] = "site_search_engine_value" + jsonified_request["uris"] = "uris_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).recrawl_uris._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "siteSearchEngine" in jsonified_request + assert jsonified_request["siteSearchEngine"] == "site_search_engine_value" + assert "uris" in jsonified_request + assert jsonified_request["uris"] == "uris_value" + + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.recrawl_uris(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_recrawl_uris_rest_unset_required_fields(): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.recrawl_uris._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "siteSearchEngine", + "uris", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_recrawl_uris_rest_interceptors(null_interceptor): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SiteSearchEngineServiceRestInterceptor(), + ) + client = SiteSearchEngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + 
operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "post_recrawl_uris" + ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, "pre_recrawl_uris" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = site_search_engine_service.RecrawlUrisRequest.pb( + site_search_engine_service.RecrawlUrisRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = site_search_engine_service.RecrawlUrisRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.recrawl_uris( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_recrawl_uris_rest_bad_request( + transport: str = "rest", request_type=site_search_engine_service.RecrawlUrisRequest +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "site_search_engine": "projects/sample1/locations/sample2/dataStores/sample3/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.recrawl_uris(request) + + +def test_recrawl_uris_rest_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.BatchVerifyTargetSitesRequest, + dict, + ], +) +def test_batch_verify_target_sites_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.batch_verify_target_sites(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_batch_verify_target_sites_rest_required_fields( + request_type=site_search_engine_service.BatchVerifyTargetSitesRequest, +): + transport_class = transports.SiteSearchEngineServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_verify_target_sites._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_verify_target_sites._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.batch_verify_target_sites(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_batch_verify_target_sites_rest_unset_required_fields(): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.batch_verify_target_sites._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_verify_target_sites_rest_interceptors(null_interceptor): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SiteSearchEngineServiceRestInterceptor(), + ) + client = SiteSearchEngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_batch_verify_target_sites", + ) as post, mock.patch.object( + 
transports.SiteSearchEngineServiceRestInterceptor, + "pre_batch_verify_target_sites", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = site_search_engine_service.BatchVerifyTargetSitesRequest.pb( + site_search_engine_service.BatchVerifyTargetSitesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = site_search_engine_service.BatchVerifyTargetSitesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.batch_verify_target_sites( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_verify_target_sites_rest_bad_request( + transport: str = "rest", + request_type=site_search_engine_service.BatchVerifyTargetSitesRequest, +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_verify_target_sites(request) + + +def test_batch_verify_target_sites_rest_error(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + site_search_engine_service.FetchDomainVerificationStatusRequest, + dict, + ], +) +def test_fetch_domain_verification_status_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "site_search_engine": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = site_search_engine_service.FetchDomainVerificationStatusResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + site_search_engine_service.FetchDomainVerificationStatusResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.fetch_domain_verification_status(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.FetchDomainVerificationStatusPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +def test_fetch_domain_verification_status_rest_required_fields( + request_type=site_search_engine_service.FetchDomainVerificationStatusRequest, +): + transport_class = transports.SiteSearchEngineServiceRestTransport + + request_init = {} + request_init["site_search_engine"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_domain_verification_status._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["siteSearchEngine"] = "site_search_engine_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_domain_verification_status._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "siteSearchEngine" in jsonified_request + assert jsonified_request["siteSearchEngine"] == "site_search_engine_value" + + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = site_search_engine_service.FetchDomainVerificationStatusResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = ( + site_search_engine_service.FetchDomainVerificationStatusResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.fetch_domain_verification_status(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_fetch_domain_verification_status_rest_unset_required_fields(): + transport = transports.SiteSearchEngineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.fetch_domain_verification_status._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("siteSearchEngine",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_fetch_domain_verification_status_rest_interceptors(null_interceptor): + transport = transports.SiteSearchEngineServiceRestTransport( + 
credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SiteSearchEngineServiceRestInterceptor(), + ) + client = SiteSearchEngineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_fetch_domain_verification_status", + ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "pre_fetch_domain_verification_status", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = site_search_engine_service.FetchDomainVerificationStatusRequest.pb( + site_search_engine_service.FetchDomainVerificationStatusRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + site_search_engine_service.FetchDomainVerificationStatusResponse.to_json( + site_search_engine_service.FetchDomainVerificationStatusResponse() + ) + ) + + request = site_search_engine_service.FetchDomainVerificationStatusRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( + site_search_engine_service.FetchDomainVerificationStatusResponse() + ) + + client.fetch_domain_verification_status( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_fetch_domain_verification_status_rest_bad_request( + transport: str = "rest", + request_type=site_search_engine_service.FetchDomainVerificationStatusRequest, +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "site_search_engine": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/siteSearchEngine" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.fetch_domain_verification_status(request) + + +def test_fetch_domain_verification_status_rest_pager(transport: str = "rest"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + next_page_token="abc", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[], + next_page_token="def", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + ], + next_page_token="ghi", + ), + site_search_engine_service.FetchDomainVerificationStatusResponse( + target_sites=[ + site_search_engine.TargetSite(), + site_search_engine.TargetSite(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + site_search_engine_service.FetchDomainVerificationStatusResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "site_search_engine": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/siteSearchEngine" + } + + pager = client.fetch_domain_verification_status(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, site_search_engine.TargetSite) for i in results) + + pages = list( + client.fetch_domain_verification_status(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.SiteSearchEngineServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.SiteSearchEngineServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SiteSearchEngineServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.SiteSearchEngineServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SiteSearchEngineServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SiteSearchEngineServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.SiteSearchEngineServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SiteSearchEngineServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.SiteSearchEngineServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = SiteSearchEngineServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.SiteSearchEngineServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.SiteSearchEngineServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SiteSearchEngineServiceGrpcTransport, + transports.SiteSearchEngineServiceGrpcAsyncIOTransport, + transports.SiteSearchEngineServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = SiteSearchEngineServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.SiteSearchEngineServiceGrpcTransport, + ) + + +def test_site_search_engine_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.SiteSearchEngineServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_site_search_engine_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.discoveryengine_v1beta.services.site_search_engine_service.transports.SiteSearchEngineServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.SiteSearchEngineServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "get_site_search_engine", + "create_target_site", + "batch_create_target_sites", + "get_target_site", + "update_target_site", + "delete_target_site", + "list_target_sites", + "enable_advanced_site_search", + "disable_advanced_site_search", + "recrawl_uris", + "batch_verify_target_sites", + "fetch_domain_verification_status", + "get_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_site_search_engine_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.discoveryengine_v1beta.services.site_search_engine_service.transports.SiteSearchEngineServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SiteSearchEngineServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_site_search_engine_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.discoveryengine_v1beta.services.site_search_engine_service.transports.SiteSearchEngineServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SiteSearchEngineServiceTransport() + adc.assert_called_once() + + +def test_site_search_engine_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + SiteSearchEngineServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SiteSearchEngineServiceGrpcTransport, + transports.SiteSearchEngineServiceGrpcAsyncIOTransport, + ], +) +def test_site_search_engine_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SiteSearchEngineServiceGrpcTransport, + transports.SiteSearchEngineServiceGrpcAsyncIOTransport, + transports.SiteSearchEngineServiceRestTransport, + ], +) +def test_site_search_engine_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.SiteSearchEngineServiceGrpcTransport, grpc_helpers), + (transports.SiteSearchEngineServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_site_search_engine_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SiteSearchEngineServiceGrpcTransport, + transports.SiteSearchEngineServiceGrpcAsyncIOTransport, + ], +) +def test_site_search_engine_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_site_search_engine_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.SiteSearchEngineServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_site_search_engine_service_rest_lro_client(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_site_search_engine_service_host_no_port(transport_name): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="discoveryengine.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_site_search_engine_service_host_with_port(transport_name): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="discoveryengine.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "discoveryengine.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_site_search_engine_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = SiteSearchEngineServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = SiteSearchEngineServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_site_search_engine._session + session2 = client2.transport.get_site_search_engine._session + assert session1 != session2 + session1 = client1.transport.create_target_site._session + session2 = client2.transport.create_target_site._session + assert session1 != session2 + session1 = 
client1.transport.batch_create_target_sites._session + session2 = client2.transport.batch_create_target_sites._session + assert session1 != session2 + session1 = client1.transport.get_target_site._session + session2 = client2.transport.get_target_site._session + assert session1 != session2 + session1 = client1.transport.update_target_site._session + session2 = client2.transport.update_target_site._session + assert session1 != session2 + session1 = client1.transport.delete_target_site._session + session2 = client2.transport.delete_target_site._session + assert session1 != session2 + session1 = client1.transport.list_target_sites._session + session2 = client2.transport.list_target_sites._session + assert session1 != session2 + session1 = client1.transport.enable_advanced_site_search._session + session2 = client2.transport.enable_advanced_site_search._session + assert session1 != session2 + session1 = client1.transport.disable_advanced_site_search._session + session2 = client2.transport.disable_advanced_site_search._session + assert session1 != session2 + session1 = client1.transport.recrawl_uris._session + session2 = client2.transport.recrawl_uris._session + assert session1 != session2 + session1 = client1.transport.batch_verify_target_sites._session + session2 = client2.transport.batch_verify_target_sites._session + assert session1 != session2 + session1 = client1.transport.fetch_domain_verification_status._session + session2 = client2.transport.fetch_domain_verification_status._session + assert session1 != session2 + + +def test_site_search_engine_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.SiteSearchEngineServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_site_search_engine_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.SiteSearchEngineServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.SiteSearchEngineServiceGrpcTransport, + transports.SiteSearchEngineServiceGrpcAsyncIOTransport, + ], +) +def test_site_search_engine_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + 
) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.SiteSearchEngineServiceGrpcTransport, + transports.SiteSearchEngineServiceGrpcAsyncIOTransport, + ], +) +def test_site_search_engine_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_site_search_engine_service_grpc_lro_client(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = 
client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_site_search_engine_service_grpc_lro_async_client(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_site_search_engine_path(): + project = "squid" + location = "clam" + data_store = "whelk" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}/siteSearchEngine".format( + project=project, + location=location, + data_store=data_store, + ) + actual = SiteSearchEngineServiceClient.site_search_engine_path( + project, location, data_store + ) + assert expected == actual + + +def test_parse_site_search_engine_path(): + expected = { + "project": "octopus", + "location": "oyster", + "data_store": "nudibranch", + } + path = SiteSearchEngineServiceClient.site_search_engine_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SiteSearchEngineServiceClient.parse_site_search_engine_path(path) + assert expected == actual + + +def test_target_site_path(): + project = "cuttlefish" + location = "mussel" + data_store = "winkle" + target_site = "nautilus" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}/siteSearchEngine/targetSites/{target_site}".format( + project=project, + location=location, + data_store=data_store, + target_site=target_site, + ) + actual = SiteSearchEngineServiceClient.target_site_path( + project, location, data_store, target_site + ) + assert expected == actual + + +def test_parse_target_site_path(): + expected = { + "project": "scallop", + "location": "abalone", + "data_store": "squid", + "target_site": "clam", + } + path = SiteSearchEngineServiceClient.target_site_path(**expected) + + # Check that the path construction is reversible. + actual = SiteSearchEngineServiceClient.parse_target_site_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = SiteSearchEngineServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = SiteSearchEngineServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SiteSearchEngineServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = SiteSearchEngineServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = SiteSearchEngineServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = SiteSearchEngineServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = SiteSearchEngineServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = SiteSearchEngineServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = SiteSearchEngineServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = SiteSearchEngineServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = SiteSearchEngineServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SiteSearchEngineServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = SiteSearchEngineServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = SiteSearchEngineServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = SiteSearchEngineServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.SiteSearchEngineServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.SiteSearchEngineServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = SiteSearchEngineServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = 
SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_get_operation(transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = SiteSearchEngineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = SiteSearchEngineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + SiteSearchEngineServiceClient, + transports.SiteSearchEngineServiceGrpcTransport, + ), + ( + SiteSearchEngineServiceAsyncClient, + transports.SiteSearchEngineServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_user_event_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_user_event_service.py index 8023f6a0a8f5..4f05f6ca73cd 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_user_event_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_user_event_service.py @@ -3281,7 +3281,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + "name": 
"projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" }, request, ) @@ -3311,7 +3311,7 @@ def test_get_operation_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5/operations/sample6" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector/operations/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -3344,7 +3344,7 @@ def test_list_operations_rest_bad_request( request = request_type() request = json_format.ParseDict( { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" }, request, ) @@ -3374,7 +3374,7 @@ def test_list_operations_rest(request_type): transport="rest", ) request_init = { - "name": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4/branches/sample5" + "name": "projects/sample1/locations/sample2/collections/sample3/dataConnector" } request = request_type(**request_init) # Mock the http request call within the method and fake a response.